/*
   Plots vectors obtained with DMDACreate2d()
*/

#include <petsc/private/dmdaimpl.h> /*I  "petscdmda.h"  I*/
#include <petsc/private/glvisvecimpl.h>
#include <petsc/private/viewerhdf5impl.h>
#include <petscdraw.h>

/*
   The data that is passed into the graphics callback
*/
typedef struct {
  PetscMPIInt        rank;                             /* rank of this process; rank 0 draws the axis decorations */
  PetscInt           m, n, dof, k;                     /* local (ghosted) grid extents, number of fields, field currently drawn */
  PetscReal          xmin, xmax, ymin, ymax, min, max; /* coordinate bounding box and value range of the field */
  const PetscScalar *xy, *v;                           /* ghosted node coordinates (interlaced x,y) and ghosted field values */
  PetscBool          showaxis, showgrid;               /* optional decorations controlled by -draw_contour_axis/-draw_contour_grid */
  const char        *name0, *name1;                    /* coordinate direction labels; may be NULL */
} ZoomCtx;

/*
   This does the drawing for one particular field
   in one particular set of coordinates. It is a callback
   called from PetscDrawZoom().

   Each quadrilateral cell of the local (ghosted) grid is rendered as two
   Gouraud-shaded triangles whose vertex colors come from mapping the field
   values into the draw color range via PetscDrawRealToColor().
*/
PetscErrorCode VecView_MPI_Draw_DA2d_Zoom(PetscDraw draw, void *ctx) {
  ZoomCtx           *zctx = (ZoomCtx *)ctx;
  PetscInt           m, n, i, j, k, dof, id, c1, c2, c3, c4;
  PetscReal          min, max, x1, x2, x3, x4, y_1, y2, y3, y4;
  const PetscScalar *xy, *v;

  PetscFunctionBegin;
  m   = zctx->m;
  n   = zctx->n;
  dof = zctx->dof;
  k   = zctx->k;
  xy  = zctx->xy;
  v   = zctx->v;
  min = zctx->min;
  max = zctx->max;

  /* PetscDraw the contour plot patch */
  PetscDrawCollectiveBegin(draw);
  for (j = 0; j < n - 1; j++) {
    for (i = 0; i < m - 1; i++) {
      /* the four corners of cell (i,j), counterclockwise: (i,j), (i+1,j), (i+1,j+1), (i,j+1) */
      id  = i + j * m;
      x1  = PetscRealPart(xy[2 * id]);
      y_1 = PetscRealPart(xy[2 * id + 1]);
      c1  = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1;
      x2 = PetscRealPart(xy[2 * id]);
      y2 = PetscRealPart(xy[2 * id + 1]);
      c2 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + 1 + m;
      x3 = PetscRealPart(xy[2 * id]);
      y3 = PetscRealPart(xy[2 * id + 1]);
      c3 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      id = i + j * m + m;
      x4 = PetscRealPart(xy[2 * id]);
      y4 = PetscRealPart(xy[2 * id + 1]);
      c4 = PetscDrawRealToColor(PetscRealPart(v[k + dof * id]), min, max);

      /* split the quad along the (1)-(3) diagonal into two shaded triangles */
      PetscCall(PetscDrawTriangle(draw, x1, y_1, x2, y2, x3, y3, c1, c2, c3));
      PetscCall(PetscDrawTriangle(draw, x1, y_1, x3, y3, x4, y4, c1, c3, c4));
      if (zctx->showgrid) {
        PetscCall(PetscDrawLine(draw, x1, y_1, x2, y2, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x2, y2, x3, y3, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x3, y3, x4, y4, PETSC_DRAW_BLACK));
        PetscCall(PetscDrawLine(draw, x4, y4, x1, y_1, PETSC_DRAW_BLACK));
      }
    }
  }
  /* only rank 0 draws the axis labels, so they are not drawn once per process */
  if (zctx->showaxis && !zctx->rank) {
    if (zctx->name0 || zctx->name1) {
      PetscReal xl, yl, xr, yr, x, y;
      PetscCall(PetscDrawGetCoordinates(draw, &xl, &yl, &xr, &yr));
      x  = xl + .30 * (xr - xl);
      xl = xl + .01 * (xr - xl);
      y  = yr - .30 * (yr - yl);
      yl = yl + .01 * (yr - yl);
      if (zctx->name0) PetscCall(PetscDrawString(draw, x, yl, PETSC_DRAW_BLACK, zctx->name0));
      if (zctx->name1) PetscCall(PetscDrawStringVertical(draw, xl, y, PETSC_DRAW_BLACK, zctx->name1));
    }
    /*
       Ideally we would use the PetscDrawAxis object to manage displaying the coordinate limits
       but that may require some refactoring.
    */
    {
      double    xmin = (double)zctx->xmin, ymin = (double)zctx->ymin;
      double    xmax = (double)zctx->xmax, ymax = (double)zctx->ymax;
      char      value[16];
      size_t    len;
      PetscReal w;
      /* print the four coordinate extremes just outside the corners of the plot region */
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmin));
      PetscCall(PetscDrawString(draw, xmin, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", xmax));
      PetscCall(PetscStrlen(value, &len));
      PetscCall(PetscDrawStringGetSize(draw, &w, NULL));
      /* right-align the xmax label by backing up by the rendered string width */
      PetscCall(PetscDrawString(draw, xmax - len * w, ymin - .05 * (ymax - ymin), PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymin));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymin, PETSC_DRAW_BLACK, value));
      PetscCall(PetscSNPrintf(value, 16, "%0.2e", ymax));
      PetscCall(PetscDrawString(draw, xmin - .05 * (xmax - xmin), ymax, PETSC_DRAW_BLACK, value));
    }
  }
  PetscDrawCollectiveEnd(draw);
  PetscFunctionReturn(0);
}

/*
   Contour-plots a 2d DMDA vector with a draw viewer: collects one layer of
   ghost values (creating auxiliary DMDAs if needed), then renders each
   selected field with PetscDrawZoom() using the callback above.
*/
PetscErrorCode VecView_MPI_Draw_DA2d(Vec xin, PetscViewer viewer) {
  DM                  da, dac, dag;
  PetscInt            N, s, M, w, ncoors = 4;
  const PetscInt     *lx, *ly;
  PetscReal           coors[4];
  PetscDraw           draw, popup;
  PetscBool           isnull, useports = PETSC_FALSE;
  MPI_Comm            comm;
  Vec                 xlocal, xcoor, xcoorl;
  DMBoundaryType      bx, by;
  DMDAStencilType     st;
  ZoomCtx             zctx;
  PetscDrawViewPorts *ports = NULL;
  PetscViewerFormat   format;
  PetscInt           *displayfields;
  PetscInt            ndisplayfields, i, nbounds;
  const PetscReal    *bounds;

  PetscFunctionBegin;
  zctx.showgrid = PETSC_FALSE;
  zctx.showaxis = PETSC_TRUE;

  PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
  PetscCall(PetscDrawIsNull(draw, &isnull));
  if (isnull) PetscFunctionReturn(0);

  PetscCall(PetscViewerDrawGetBounds(viewer, &nbounds, &bounds));

  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");

  PetscCall(PetscObjectGetComm((PetscObject)xin, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &zctx.rank));

  PetscCall(DMDAGetInfo(da, NULL, &M, &N, NULL, &zctx.m, &zctx.n, NULL, &w, &s, &bx, &by, NULL, &st));
  PetscCall(DMDAGetOwnershipRanges(da, &lx, &ly, NULL));

  /*
     Obtain a sequential vector that is going to contain the local values plus ONE layer of
     ghosted values to draw the graphics from. We also need its corresponding DMDA (dac) that will
     update the local values plus ONE layer of ghost values.
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsGhosted", (PetscObject *)&xlocal));
  if (!xlocal) {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      /*
         if original da is not of stencil width one, or periodic or not a box stencil then
         create a special DMDA to handle one level of ghost points for graphics
      */
      PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, w, 1, lx, ly, &dac));
      PetscCall(DMSetUp(dac));
      PetscCall(PetscInfo(da, "Creating auxiliary DMDA for managing graphics ghost points\n"));
    } else {
      /* otherwise we can use the da we already have */
      dac = da;
    }
    /* create local vector for holding ghosted values used in graphics */
    PetscCall(DMCreateLocalVector(dac, &xlocal));
    if (dac != da) {
      /* don't keep any public reference of this DMDA, it is only available through xlocal */
      PetscCall(PetscObjectDereference((PetscObject)dac));
    } else {
      /* remove association between xlocal and da, because below we compose in the opposite
         direction and if we left this connection we'd get a loop, so the objects could
         never be destroyed */
      PetscCall(PetscObjectRemoveReference((PetscObject)xlocal, "__PETSc_dm"));
    }
    /* cache xlocal on da so subsequent views reuse it; da holds the only reference */
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsGhosted", (PetscObject)xlocal));
    PetscCall(PetscObjectDereference((PetscObject)xlocal));
  } else {
    if (bx != DM_BOUNDARY_NONE || by != DM_BOUNDARY_NONE || s != 1 || st != DMDA_STENCIL_BOX) {
      PetscCall(VecGetDM(xlocal, &dac));
    } else {
      dac = da;
    }
  }

  /*
     Get local (ghosted) values of vector
  */
  PetscCall(DMGlobalToLocalBegin(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(DMGlobalToLocalEnd(dac, xin, INSERT_VALUES, xlocal));
  PetscCall(VecGetArrayRead(xlocal, &zctx.v));

  /*
     Get coordinates of nodes; default to uniform [0,1]x[0,1] if none were set
  */
  PetscCall(DMGetCoordinates(da, &xcoor));
  if (!xcoor) {
    PetscCall(DMDASetUniformCoordinates(da, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0));
    PetscCall(DMGetCoordinates(da, &xcoor));
  }

  /*
     Determine the min and max coordinates in plot
  */
  PetscCall(VecStrideMin(xcoor, 0, NULL, &zctx.xmin));
  PetscCall(VecStrideMax(xcoor, 0, NULL, &zctx.xmax));
  PetscCall(VecStrideMin(xcoor, 1, NULL, &zctx.ymin));
  PetscCall(VecStrideMax(xcoor, 1, NULL, &zctx.ymax));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_axis", &zctx.showaxis, NULL));
  if (zctx.showaxis) {
    /* pad the window by 5% on each side to leave room for the axis labels */
    coors[0] = zctx.xmin - .05 * (zctx.xmax - zctx.xmin);
    coors[1] = zctx.ymin - .05 * (zctx.ymax - zctx.ymin);
    coors[2] = zctx.xmax + .05 * (zctx.xmax - zctx.xmin);
    coors[3] = zctx.ymax + .05 * (zctx.ymax - zctx.ymin);
  } else {
    coors[0] = zctx.xmin;
    coors[1] = zctx.ymin;
    coors[2] = zctx.xmax;
    coors[3] = zctx.ymax;
  }
  /* allow user override of the window coordinates */
  PetscCall(PetscOptionsGetRealArray(NULL, NULL, "-draw_coordinates", coors, &ncoors, NULL));
  PetscCall(PetscInfo(da, "Preparing DMDA 2d contour plot coordinates %g %g %g %g\n", (double)coors[0], (double)coors[1], (double)coors[2], (double)coors[3]));

  /*
     Get local ghosted version of coordinates (dof 2: interlaced x,y), cached on da like xlocal above
  */
  PetscCall(PetscObjectQuery((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject *)&xcoorl));
  if (!xcoorl) {
    /* create DMDA to get local version of graphics */
    PetscCall(DMDACreate2d(comm, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, zctx.m, zctx.n, 2, 1, lx, ly, &dag));
    PetscCall(DMSetUp(dag));
    PetscCall(PetscInfo(dag, "Creating auxiliary DMDA for managing graphics coordinates ghost points\n"));
    PetscCall(DMCreateLocalVector(dag, &xcoorl));
    PetscCall(PetscObjectCompose((PetscObject)da, "GraphicsCoordinateGhosted", (PetscObject)xcoorl));
    PetscCall(PetscObjectDereference((PetscObject)dag));
    PetscCall(PetscObjectDereference((PetscObject)xcoorl));
  } else PetscCall(VecGetDM(xcoorl, &dag));
  PetscCall(DMGlobalToLocalBegin(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(DMGlobalToLocalEnd(dag, xcoor, INSERT_VALUES, xcoorl));
  PetscCall(VecGetArrayRead(xcoorl, &zctx.xy));
  PetscCall(DMDAGetCoordinateName(da, 0, &zctx.name0));
  PetscCall(DMDAGetCoordinateName(da, 1, &zctx.name1));

  /*
     Get information about size of area each processor must do graphics for
  */
  PetscCall(DMDAGetInfo(dac, NULL, &M, &N, NULL, NULL, NULL, NULL, &zctx.dof, NULL, &bx, &by, NULL, NULL));
  PetscCall(DMDAGetGhostCorners(dac, NULL, NULL, NULL, &zctx.m, &zctx.n, NULL));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_contour_grid", &zctx.showgrid, NULL));

  PetscCall(DMDASelectFields(da, &ndisplayfields, &displayfields));
  PetscCall(PetscViewerGetFormat(viewer, &format));
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-draw_ports", &useports, NULL));
  if (format == PETSC_VIEWER_DRAW_PORTS) useports = PETSC_TRUE;
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawCheckResizedWindow(draw));
    PetscCall(PetscDrawClear(draw));
    PetscCall(PetscDrawViewPortsCreate(draw, ndisplayfields, &ports));
  }

  /*
     Loop over each field; drawing each in a different window (or viewport)
  */
  for (i = 0; i < ndisplayfields; i++) {
    zctx.k = displayfields[i];

    /* determine the min and max value in plot */
    PetscCall(VecStrideMin(xin, zctx.k, NULL, &zctx.min));
    PetscCall(VecStrideMax(xin, zctx.k, NULL, &zctx.max));
    /* viewer-supplied bounds (if any) take precedence over the computed range */
    if (zctx.k < nbounds) {
      zctx.min = bounds[2 * zctx.k];
      zctx.max = bounds[2 * zctx.k + 1];
    }
    /* avoid a degenerate color range for constant fields */
    if (zctx.min == zctx.max) {
      zctx.min -= 1.e-12;
      zctx.max += 1.e-12;
    }
    PetscCall(PetscInfo(da, "DMDA 2d contour plot min %g max %g\n", (double)zctx.min, (double)zctx.max));

    if (useports) {
      PetscCall(PetscDrawViewPortsSet(ports, i));
    } else {
      const char *title;
      PetscCall(PetscViewerDrawGetDraw(viewer, i, &draw));
      PetscCall(DMDAGetFieldName(da, zctx.k, &title));
      if (title) PetscCall(PetscDrawSetTitle(draw, title));
    }

    PetscCall(PetscDrawGetPopup(draw, &popup));
    PetscCall(PetscDrawScalePopup(popup, zctx.min, zctx.max));
    PetscCall(PetscDrawSetCoordinates(draw, coors[0], coors[1], coors[2], coors[3]));
    PetscCall(PetscDrawZoom(draw, VecView_MPI_Draw_DA2d_Zoom, &zctx));
    if (!useports) PetscCall(PetscDrawSave(draw));
  }
  if (useports) {
    PetscCall(PetscViewerDrawGetDraw(viewer, 0, &draw));
    PetscCall(PetscDrawSave(draw));
  }

  PetscCall(PetscDrawViewPortsDestroy(ports));
  PetscCall(PetscFree(displayfields));
  PetscCall(VecRestoreArrayRead(xcoorl, &zctx.xy));
  PetscCall(VecRestoreArrayRead(xlocal, &zctx.v));
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   Chooses HDF5 chunk dimensions for writing the vector, respecting the HDF5
   limits on chunk count (64Ki) and chunk size (4 GiB). chunkDims[] comes in
   pre-filled with per-timestep full-lattice dimensions and is adjusted in
   place only when those defaults would violate the limits.
*/
static PetscErrorCode VecGetHDF5ChunkSize(DM_DA *da, Vec xin, PetscInt dimension, PetscInt timestep, hsize_t *chunkDims) {
  PetscMPIInt comm_size;
  hsize_t     chunk_size, target_size, dim;
  hsize_t     vec_size = sizeof(PetscScalar) * da->M * da->N * da->P * da->w;
  hsize_t     avg_local_vec_size, KiB = 1024, MiB = KiB * KiB, GiB = MiB * KiB, min_size = MiB;
  hsize_t     max_chunks = 64 * KiB;     /* HDF5 internal limitation */
  hsize_t     max_chunk_size = 4 * GiB;  /* HDF5 internal limitation */
  PetscInt    zslices = da->p, yslices = da->n, xslices = da->m;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)xin), &comm_size));
  avg_local_vec_size = (hsize_t)PetscCeilInt(vec_size, comm_size); /* we will attempt to use this as the chunk size */

  /* clamp the desired chunk size into [min_size, max_chunk_size] while keeping the chunk count under max_chunks */
  target_size = (hsize_t)PetscMin((PetscInt64)vec_size, PetscMin((PetscInt64)max_chunk_size, PetscMax((PetscInt64)avg_local_vec_size, PetscMax(PetscCeilInt64(vec_size, max_chunks), (PetscInt64)min_size))));
  /* following line uses sizeof(PetscReal) instead of sizeof(PetscScalar) because the last dimension of chunkDims[] captures the 2* when complex numbers are being used */
  chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(PetscReal);

  /*
     if size/rank > max_chunk_size, we need radical measures: even going down to
     avg_local_vec_size is not enough, so we simply use chunk size of 4 GiB no matter
     what, composed in the most efficient way possible.
     N.B. this minimises the number of chunks, which may or may not be the optimal
     solution. In a BG, for example, the optimal solution is probably to make # chunks = #
     IO nodes involved, but this author has no access to a BG to figure out how to
     reliably find the right number. And even then it may or may not be enough.
  */
  if (avg_local_vec_size > max_chunk_size) {
    /* check if we can just split local z-axis: is that enough? */
    zslices = PetscCeilInt(vec_size, da->p * max_chunk_size) * zslices;
    if (zslices > da->P) {
      /* lattice is too large in xy-directions, splitting z only is not enough */
      zslices = da->P;
      yslices = PetscCeilInt(vec_size, zslices * da->n * max_chunk_size) * yslices;
      if (yslices > da->N) {
        /* lattice is too large in x-direction, splitting along z, y is not enough */
        yslices = da->N;
        xslices = PetscCeilInt(vec_size, zslices * yslices * da->m * max_chunk_size) * xslices;
      }
    }
    dim = 0;
    if (timestep >= 0) { ++dim; }
    /* prefer to split z-axis, even down to planar slices */
    if (dimension == 3) {
      chunkDims[dim++] = (hsize_t)da->P / zslices;
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    } else {
      /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
      chunkDims[dim++] = (hsize_t)da->N / yslices;
      chunkDims[dim++] = (hsize_t)da->M / xslices;
    }
    /* NOTE(review): sizeof(double) here vs sizeof(PetscReal) above looks inconsistent — confirm intent for single-precision builds */
    chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
  } else {
    if (target_size < chunk_size) {
      /* only change the defaults if target_size < chunk_size */
      dim = 0;
      if (timestep >= 0) { ++dim; }
      /* prefer to split z-axis, even down to planar slices */
      if (dimension == 3) {
        /* try splitting the z-axis to core-size bits, i.e. divide chunk size by # comm_size in z-direction */
        if (target_size >= chunk_size / da->p) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->P, da->p);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->P, da->p);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      } else {
        /* This is a 2D world exceeding 4GiB in size; yes, I've seen them, even used myself */
        if (target_size >= chunk_size / da->n) {
          /* just make chunks the size of <local_z>x<whole_world_y>x<whole_world_x>x<dof> */
          chunkDims[dim] = (hsize_t)PetscCeilInt(da->N, da->n);
        } else {
          /* oops, just splitting the z-axis is NOT ENOUGH, need to split more; let's be
             radical and let everyone write all they've got */
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->N, da->n);
          chunkDims[dim++] = (hsize_t)PetscCeilInt(da->M, da->m);
        }
      }
      chunk_size = (hsize_t)PetscMax(1, chunkDims[0]) * PetscMax(1, chunkDims[1]) * PetscMax(1, chunkDims[2]) * PetscMax(1, chunkDims[3]) * PetscMax(1, chunkDims[4]) * PetscMax(1, chunkDims[5]) * sizeof(double);
    } else {
      /* precomputed chunks are fine, we don't need to do anything */
    }
  }
  PetscFunctionReturn(0);
}
#endif

#if defined(PETSC_HAVE_HDF5)
/*
   Writes a DMDA vector to an HDF5 file as a (possibly timestepped) dataset
   laid out on the global lattice; each rank writes its own hyperslab.
*/
PetscErrorCode VecView_MPI_HDF5_DA(Vec xin, PetscViewer viewer) {
  PetscViewer_HDF5  *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                 dm;
  DM_DA             *da;
  hid_t              filespace; /* file dataspace identifier */
  hid_t              chunkspace; /* chunk dataset property identifier */
  hid_t              dset_id; /* dataset identifier */
  hid_t              memspace; /* memory dataspace identifier */
  hid_t              file_id;
  hid_t              group;
  hid_t              memscalartype; /* scalar type for mem
(H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hid_t              filescalartype; /* scalar type for file (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  hsize_t            dim;
  hsize_t            maxDims[6] = {0}, dims[6] = {0}, chunkDims[6] = {0}, count[6] = {0}, offset[6] = {0}; /* we depend on these being sane later on */
  PetscBool          timestepping = PETSC_FALSE, dim2, spoutput;
  PetscInt           timestep = PETSC_MIN_INT, dimension;
  const PetscScalar *x;
  const char        *vecname;

  PetscFunctionBegin;
  PetscCall(PetscViewerHDF5OpenGroup(viewer, &file_id, &group));
  PetscCall(PetscViewerHDF5IsTimestepping(viewer, &timestepping));
  if (timestepping) { PetscCall(PetscViewerHDF5GetTimestep(viewer, &timestep)); }
  PetscCall(PetscViewerHDF5GetBaseDimension2(viewer, &dim2));
  PetscCall(PetscViewerHDF5GetSPOutput(viewer, &spoutput));

  PetscCall(VecGetDM(xin, &dm));
  PetscCheck(dm, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  da = (DM_DA *)dm->data;
  PetscCall(DMGetDimension(dm, &dimension));

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep + 1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  /* dimensions are laid out slowest-varying first: (t,) z, y, x, dof (, re/im) */
  if (dimension == 3) {
    PetscCall(PetscHDF5IntCast(da->P, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  if (dimension > 1) {
    PetscCall(PetscHDF5IntCast(da->N, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  PetscCall(PetscHDF5IntCast(da->M, dims + dim));
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (da->w > 1 || dim2) {
    PetscCall(PetscHDF5IntCast(da->w, dims + dim));
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
#if defined(PETSC_USE_COMPLEX)
  /* trailing dimension of size 2 holds the real and imaginary parts */
  dims[dim]      = 2;
  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
#endif

  PetscCall(VecGetHDF5ChunkSize(da, xin, dimension, timestep, chunkDims));

  PetscCallHDF5Return(filespace, H5Screate_simple, (dim, dims, maxDims));

#if defined(PETSC_USE_REAL_SINGLE)
  memscalartype  = H5T_NATIVE_FLOAT;
  filescalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
#else
  memscalartype = H5T_NATIVE_DOUBLE;
  if (spoutput == PETSC_TRUE) filescalartype = H5T_NATIVE_FLOAT;
  else filescalartype = H5T_NATIVE_DOUBLE;
#endif

  /* Create the dataset with default properties and close filespace */
  PetscCall(PetscObjectGetName((PetscObject)xin, &vecname));
  if (!H5Lexists(group, vecname, H5P_DEFAULT)) {
    /* Create chunk */
    PetscCallHDF5Return(chunkspace, H5Pcreate, (H5P_DATASET_CREATE));
    PetscCallHDF5(H5Pset_chunk, (chunkspace, dim, chunkDims));

    PetscCallHDF5Return(dset_id, H5Dcreate2, (group, vecname, filescalartype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT));
  } else {
    /* dataset already exists (earlier timestep); extend it to include this one */
    PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT));
    PetscCallHDF5(H5Dset_extent, (dset_id, dims));
  }
  PetscCallHDF5(H5Sclose, (filespace));

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->zs, offset + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ys, offset + dim++));
  /* da->xs counts unknowns, not nodes, hence the division by dof */
  PetscCall(PetscHDF5IntCast(da->xs / da->w, offset + dim++));
  if (da->w > 1 || dim2) offset[dim++] = 0;
#if defined(PETSC_USE_COMPLEX)
  offset[dim++] = 0;
#endif
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  if (dimension == 3) PetscCall(PetscHDF5IntCast(da->ze - da->zs, count + dim++));
  if (dimension > 1) PetscCall(PetscHDF5IntCast(da->ye - da->ys, count + dim++));
  PetscCall(PetscHDF5IntCast((da->xe - da->xs) / da->w, count + dim++));
  if (da->w > 1 || dim2) PetscCall(PetscHDF5IntCast(da->w, count + dim++));
#if defined(PETSC_USE_COMPLEX)
  count[dim++] = 2;
#endif
  PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL));
  PetscCallHDF5Return(filespace, H5Dget_space, (dset_id));
  PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL));

  PetscCall(VecGetArrayRead(xin, &x));
  PetscCallHDF5(H5Dwrite, (dset_id, memscalartype, memspace, filespace, hdf5->dxpl_id, x));
  PetscCallHDF5(H5Fflush, (file_id, H5F_SCOPE_GLOBAL));
  PetscCall(VecRestoreArrayRead(xin, &x));

#if defined(PETSC_USE_COMPLEX)
  {
    PetscBool tru = PETSC_TRUE;
    PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "complex", PETSC_BOOL, &tru));
  }
#endif
  if (timestepping) { PetscCall(PetscViewerHDF5WriteObjectAttribute(viewer, (PetscObject)xin, "timestepping", PETSC_BOOL, &timestepping)); }

  /* Close/release resources */
  if (group != file_id) { PetscCallHDF5(H5Gclose, (group)); }
  PetscCallHDF5(H5Sclose, (filespace));
  PetscCallHDF5(H5Sclose, (memspace));
  PetscCallHDF5(H5Dclose, (dset_id));
  PetscCall(PetscInfo(xin, "Wrote Vec object with name %s\n", vecname));
  PetscFunctionReturn(0);
}
#endif

extern PetscErrorCode VecView_MPI_Draw_DA1d(Vec, PetscViewer);

#if defined(PETSC_HAVE_MPIIO)
/*
   Reads or writes (per the `write` flag) the DMDA vector with collective
   MPI-IO through a binary viewer, using an MPI subarray datatype so each
   rank touches only its own portion of the natural-ordering file layout.
*/
static PetscErrorCode DMDAArrayMPIIO(DM da, PetscViewer viewer, Vec xin, PetscBool write) {
  MPI_File           mfdes;
  PetscMPIInt        gsizes[4], lsizes[4], lstarts[4], asiz, dof;
  MPI_Datatype       view;
  const PetscScalar *array;
  MPI_Offset         off;
  MPI_Aint           ub, ul;
  PetscInt           type, rows, vecrows, tr[2];
  DM_DA             *dd = (DM_DA *)da->data;
  PetscBool          skipheader;

  PetscFunctionBegin;
  PetscCall(VecGetSize(xin, &vecrows));
  PetscCall(PetscViewerBinaryGetSkipHeader(viewer, &skipheader));
  if (!write) {
    /* Read vector header.
    */
    if (!skipheader) {
      PetscCall(PetscViewerBinaryRead(viewer, tr, 2, NULL, PETSC_INT));
      type = tr[0];
      rows = tr[1];
      /* validate the classid/size header before trusting the file contents */
      PetscCheck(type == VEC_FILE_CLASSID, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Not vector next in file");
      PetscCheck(rows == vecrows, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_SIZ, "Vector in file not same size as DMDA vector");
    }
  } else {
    tr[0] = VEC_FILE_CLASSID;
    tr[1] = vecrows;
    if (!skipheader) { PetscCall(PetscViewerBinaryWrite(viewer, tr, 2, PETSC_INT)); }
  }

  /* build the global/local/start arrays describing this rank's subarray; index 0 is the dof dimension */
  PetscCall(PetscMPIIntCast(dd->w, &dof));
  gsizes[0] = dof;
  PetscCall(PetscMPIIntCast(dd->M, gsizes + 1));
  PetscCall(PetscMPIIntCast(dd->N, gsizes + 2));
  PetscCall(PetscMPIIntCast(dd->P, gsizes + 3));
  lsizes[0] = dof;
  /* dd->xs/xe count unknowns, not nodes, hence the division by dof */
  PetscCall(PetscMPIIntCast((dd->xe - dd->xs) / dof, lsizes + 1));
  PetscCall(PetscMPIIntCast(dd->ye - dd->ys, lsizes + 2));
  PetscCall(PetscMPIIntCast(dd->ze - dd->zs, lsizes + 3));
  lstarts[0] = 0;
  PetscCall(PetscMPIIntCast(dd->xs / dof, lstarts + 1));
  PetscCall(PetscMPIIntCast(dd->ys, lstarts + 2));
  PetscCall(PetscMPIIntCast(dd->zs, lstarts + 3));
  /* FORTRAN order: first (dof) dimension varies fastest, matching the interlaced vector layout */
  PetscCallMPI(MPI_Type_create_subarray(da->dim + 1, gsizes, lsizes, lstarts, MPI_ORDER_FORTRAN, MPIU_SCALAR, &view));
  PetscCallMPI(MPI_Type_commit(&view));

  PetscCall(PetscViewerBinaryGetMPIIODescriptor(viewer, &mfdes));
  PetscCall(PetscViewerBinaryGetMPIIOOffset(viewer, &off));
  PetscCallMPI(MPI_File_set_view(mfdes, off, MPIU_SCALAR, view, (char *)"native", MPI_INFO_NULL));
  PetscCall(VecGetArrayRead(xin, &array));
  /* local element count; unused trailing dimensions are 0 in lower-dimensional DMDAs, so clamp to 1 */
  asiz = lsizes[1] * (lsizes[2] > 0 ? lsizes[2] : 1) * (lsizes[3] > 0 ? lsizes[3] : 1) * dof;
  if (write) PetscCall(MPIU_File_write_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  else PetscCall(MPIU_File_read_all(mfdes, (PetscScalar *)array, asiz, MPIU_SCALAR, MPI_STATUS_IGNORE));
  /* advance the shared viewer offset by the full (global) extent of the datatype */
  PetscCallMPI(MPI_Type_get_extent(view, &ul, &ub));
  PetscCall(PetscViewerBinaryAddMPIIOOffset(viewer, ub));
  PetscCall(VecRestoreArrayRead(xin, &array));
  PetscCallMPI(MPI_Type_free(&view));
  PetscFunctionReturn(0);
}
#endif

/*
   Dispatch point for viewing a DMDA vector: draw, VTK, HDF5, GLVis, or the
   default path that permutes into natural ordering before calling VecView().
*/
PetscErrorCode VecView_MPI_DA(Vec xin, PetscViewer viewer) {
  DM        da;
  PetscInt  dim;
  Vec       natural;
  PetscBool isdraw, isvtk, isglvis;
#if defined(PETSC_HAVE_HDF5)
  PetscBool ishdf5;
#endif
  const char       *prefix, *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(VecGetDM(xin, &da));
  PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA");
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERDRAW, &isdraw));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
#if defined(PETSC_HAVE_HDF5)
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5));
#endif
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERGLVIS, &isglvis));
  if (isdraw) {
    PetscCall(DMDAGetInfo(da, &dim, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
    if (dim == 1) {
      PetscCall(VecView_MPI_Draw_DA1d(xin, viewer));
    } else if (dim == 2) {
      PetscCall(VecView_MPI_Draw_DA2d(xin, viewer));
    } else SETERRQ(PetscObjectComm((PetscObject)da), PETSC_ERR_SUP, "Cannot graphically view vector associated with this dimensional DMDA %" PetscInt_FMT, dim);
  } else if (isvtk) { /* Duplicate the Vec */
    Vec Y;
    PetscCall(VecDuplicate(xin, &Y));
    if (((PetscObject)xin)->name) {
      /* If xin was named, copy the name over to Y.
         The duplicate names are safe because nobody else will ever see Y. */
      PetscCall(PetscObjectSetName((PetscObject)Y, ((PetscObject)xin)->name));
    }
    /* Y is handed off to the VTK viewer, which writes (and destroys) it when the viewer is flushed */
    PetscCall(VecCopy(xin, Y));
    {
      PetscObject dmvtk;
      PetscBool   compatible, compatibleSet;
      PetscCall(PetscViewerVTKGetDM(viewer, &dmvtk));
      if (dmvtk) {
        PetscValidHeaderSpecific((DM)dmvtk, DM_CLASSID, 2);
        PetscCall(DMGetCompatibility(da, (DM)dmvtk, &compatible, &compatibleSet));
        PetscCheck(compatibleSet && compatible, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_INCOMP, "Cannot confirm compatibility of DMs associated with Vecs viewed in the same VTK file. Check that grids are the same.");
      }
      PetscCall(PetscViewerVTKAddField(viewer, (PetscObject)da, DMDAVTKWriteAll, PETSC_DEFAULT, PETSC_VTK_POINT_FIELD, PETSC_FALSE, (PetscObject)Y));
    }
#if defined(PETSC_HAVE_HDF5)
  } else if (ishdf5) {
    PetscCall(VecView_MPI_HDF5_DA(xin, viewer));
#endif
  } else if (isglvis) {
    PetscCall(VecView_GLVis(xin, viewer));
  } else {
#if defined(PETSC_HAVE_MPIIO)
    PetscBool isbinary, isMPIIO;

    /* binary viewers configured for MPI-IO bypass the natural-ordering scatter entirely */
    PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary));
    if (isbinary) {
      PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO));
      if (isMPIIO) {
        PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_TRUE));
        PetscFunctionReturn(0);
      }
    }
#endif

    /* call viewer on natural ordering */
    PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix));
    PetscCall(DMDACreateNaturalVector(da, &natural));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix));
    PetscCall(DMDAGlobalToNaturalBegin(da, xin, INSERT_VALUES, natural));
    PetscCall(DMDAGlobalToNaturalEnd(da, xin, INSERT_VALUES, natural));
    PetscCall(PetscObjectGetName((PetscObject)xin, &name));
    PetscCall(PetscObjectSetName((PetscObject)natural, name));

    PetscCall(PetscViewerGetFormat(viewer, &format));
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      /* temporarily remove viewer format so it won't trigger in the VecView() */
      PetscCall(PetscViewerPushFormat(viewer, PETSC_VIEWER_DEFAULT));
    }

    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_TRUE;
    PetscCall(VecView(natural, viewer));
    ((PetscObject)natural)->donotPetscObjectPrintClassNamePrefixType = PETSC_FALSE;

    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm    comm;
      FILE       *info;
      const char *fieldname;
      char        fieldbuf[256];
      PetscInt    dim, ni, nj, nk, pi, pj, pk, dof, n;

      /* set the viewer format back into the viewer */
      PetscCall(PetscViewerPopFormat(viewer));
      PetscCall(PetscObjectGetComm((PetscObject)viewer, &comm));
      PetscCall(PetscViewerBinaryGetInfoPointer(viewer, &info));
      PetscCall(DMDAGetInfo(da, &dim, &ni, &nj, &nk, &pi, &pj, &pk, &dof, NULL, NULL, NULL, NULL, NULL));
      /* emit MATLAB reshape/squeeze commands into the .info file so the data reads back with its grid shape */
      PetscCall(PetscFPrintf(comm, info, "#--- begin code written by PetscViewerBinary for MATLAB format ---#\n"));
      PetscCall(PetscFPrintf(comm, info, "#$$ tmp = PetscBinaryRead(fd); \n"));
      if (dim == 1) { PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni)); }
      if (dim == 2) { PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj)); }
      if (dim == 3) { PetscCall(PetscFPrintf(comm, info, "#$$ tmp = reshape(tmp,%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ",%" PetscInt_FMT ");\n", dof, ni, nj, nk)); }

      for (n = 0; n < dof; n++) {
        PetscCall(DMDAGetFieldName(da, n, &fieldname));
        if (!fieldname || !fieldname[0]) {
          /* unnamed fields get a generated name so the MATLAB struct field is valid */
          PetscCall(PetscSNPrintf(fieldbuf, sizeof fieldbuf, "field%" PetscInt_FMT, n));
          fieldname = fieldbuf;
        }
        if (dim == 1) { PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:))';\n", name, fieldname, n + 1)); }
        if (dim == 2) { PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = squeeze(tmp(%" PetscInt_FMT ",:,:))';\n", name, fieldname, n + 1)); }
        if (dim == 3) { PetscCall(PetscFPrintf(comm, info, "#$$ Set.%s.%s = permute(squeeze(tmp(%" PetscInt_FMT ",:,:,:)),[2 1 3]);\n", name, fieldname, n + 1)); }
      }
      PetscCall(PetscFPrintf(comm, info, "#$$ clear tmp; \n"));
      PetscCall(PetscFPrintf(comm, info, "#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n"));
    }

    PetscCall(VecDestroy(&natural));
  }
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_HDF5)
/*
   Loads a DMDA vector from an HDF5 dataset written by VecView_MPI_HDF5_DA();
   each rank reads its own hyperslab of the (possibly timestepped) dataset.
*/
PetscErrorCode VecLoad_HDF5_DA(Vec xin, PetscViewer viewer) {
  PetscViewer_HDF5 *hdf5 = (PetscViewer_HDF5 *)viewer->data;
  DM                da;
  int               dim, rdim;
  hsize_t           dims[6] = {0}, count[6] = {0}, offset[6] = {0};
  PetscBool         dim2 = PETSC_FALSE, timestepping = PETSC_FALSE;
  PetscInt          dimension, timestep = PETSC_MIN_INT, dofInd;
  PetscScalar      *x;
  const char       *vecname;
  hid_t             filespace; /* file dataspace identifier */
  hid_t             dset_id; /* dataset identifier */
  hid_t             memspace; /* memory dataspace identifier */
  hid_t             file_id, group;
  hid_t             scalartype; /* scalar type (H5T_NATIVE_FLOAT or H5T_NATIVE_DOUBLE) */
  DM_DA            *dd;

  PetscFunctionBegin;
#if defined(PETSC_USE_REAL_SINGLE)
  scalartype = H5T_NATIVE_FLOAT;
#elif defined(PETSC_USE_REAL___FLOAT128)
#error "HDF5 output with 128 bit floats not supported."
#elif defined(PETSC_USE_REAL___FP16)
#error "HDF5 output with 16 bit floats not supported."
777 #else 778 scalartype = H5T_NATIVE_DOUBLE; 779 #endif 780 781 PetscCall(PetscViewerHDF5OpenGroup(viewer, &file_id, &group)); 782 PetscCall(PetscObjectGetName((PetscObject)xin, &vecname)); 783 PetscCall(PetscViewerHDF5CheckTimestepping_Internal(viewer, vecname)); 784 PetscCall(PetscViewerHDF5IsTimestepping(viewer, ×tepping)); 785 if (timestepping) { PetscCall(PetscViewerHDF5GetTimestep(viewer, ×tep)); } 786 PetscCall(VecGetDM(xin, &da)); 787 dd = (DM_DA *)da->data; 788 PetscCall(DMGetDimension(da, &dimension)); 789 790 /* Open dataset */ 791 PetscCallHDF5Return(dset_id, H5Dopen2, (group, vecname, H5P_DEFAULT)); 792 793 /* Retrieve the dataspace for the dataset */ 794 PetscCallHDF5Return(filespace, H5Dget_space, (dset_id)); 795 PetscCallHDF5Return(rdim, H5Sget_simple_extent_dims, (filespace, dims, NULL)); 796 797 /* Expected dimension for holding the dof's */ 798 #if defined(PETSC_USE_COMPLEX) 799 dofInd = rdim - 2; 800 #else 801 dofInd = rdim - 1; 802 #endif 803 804 /* The expected number of dimensions, assuming basedimension2 = false */ 805 dim = dimension; 806 if (dd->w > 1) ++dim; 807 if (timestep >= 0) ++dim; 808 #if defined(PETSC_USE_COMPLEX) 809 ++dim; 810 #endif 811 812 /* In this case the input dataset have one extra, unexpected dimension. 
*/ 813 if (rdim == dim + 1) { 814 /* In this case the block size unity */ 815 if (dd->w == 1 && dims[dofInd] == 1) dim2 = PETSC_TRUE; 816 817 /* Special error message for the case where dof does not match the input file */ 818 else PetscCheck(dd->w == (PetscInt)dims[dofInd], PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Number of dofs in file is %" PetscInt_FMT ", not %" PetscInt_FMT " as expected", (PetscInt)dims[dofInd], dd->w); 819 820 /* Other cases where rdim != dim cannot be handled currently */ 821 } else PetscCheck(rdim == dim, PETSC_COMM_SELF, PETSC_ERR_FILE_UNEXPECTED, "Dimension of array in file is %d, not %d as expected with dof = %" PetscInt_FMT, rdim, dim, dd->w); 822 823 /* Set up the hyperslab size */ 824 dim = 0; 825 if (timestep >= 0) { 826 offset[dim] = timestep; 827 count[dim] = 1; 828 ++dim; 829 } 830 if (dimension == 3) { 831 PetscCall(PetscHDF5IntCast(dd->zs, offset + dim)); 832 PetscCall(PetscHDF5IntCast(dd->ze - dd->zs, count + dim)); 833 ++dim; 834 } 835 if (dimension > 1) { 836 PetscCall(PetscHDF5IntCast(dd->ys, offset + dim)); 837 PetscCall(PetscHDF5IntCast(dd->ye - dd->ys, count + dim)); 838 ++dim; 839 } 840 PetscCall(PetscHDF5IntCast(dd->xs / dd->w, offset + dim)); 841 PetscCall(PetscHDF5IntCast((dd->xe - dd->xs) / dd->w, count + dim)); 842 ++dim; 843 if (dd->w > 1 || dim2) { 844 offset[dim] = 0; 845 PetscCall(PetscHDF5IntCast(dd->w, count + dim)); 846 ++dim; 847 } 848 #if defined(PETSC_USE_COMPLEX) 849 offset[dim] = 0; 850 count[dim] = 2; 851 ++dim; 852 #endif 853 854 /* Create the memory and filespace */ 855 PetscCallHDF5Return(memspace, H5Screate_simple, (dim, count, NULL)); 856 PetscCallHDF5(H5Sselect_hyperslab, (filespace, H5S_SELECT_SET, offset, NULL, count, NULL)); 857 858 PetscCall(VecGetArray(xin, &x)); 859 PetscCallHDF5(H5Dread, (dset_id, scalartype, memspace, filespace, hdf5->dxpl_id, x)); 860 PetscCall(VecRestoreArray(xin, &x)); 861 862 /* Close/release resources */ 863 if (group != file_id) { PetscCallHDF5(H5Gclose, 
(group)); } 864 PetscCallHDF5(H5Sclose, (filespace)); 865 PetscCallHDF5(H5Sclose, (memspace)); 866 PetscCallHDF5(H5Dclose, (dset_id)); 867 PetscFunctionReturn(0); 868 } 869 #endif 870 871 PetscErrorCode VecLoad_Binary_DA(Vec xin, PetscViewer viewer) { 872 DM da; 873 Vec natural; 874 const char *prefix; 875 PetscInt bs; 876 PetscBool flag; 877 DM_DA *dd; 878 #if defined(PETSC_HAVE_MPIIO) 879 PetscBool isMPIIO; 880 #endif 881 882 PetscFunctionBegin; 883 PetscCall(VecGetDM(xin, &da)); 884 dd = (DM_DA *)da->data; 885 #if defined(PETSC_HAVE_MPIIO) 886 PetscCall(PetscViewerBinaryGetUseMPIIO(viewer, &isMPIIO)); 887 if (isMPIIO) { 888 PetscCall(DMDAArrayMPIIO(da, viewer, xin, PETSC_FALSE)); 889 PetscFunctionReturn(0); 890 } 891 #endif 892 893 PetscCall(PetscObjectGetOptionsPrefix((PetscObject)xin, &prefix)); 894 PetscCall(DMDACreateNaturalVector(da, &natural)); 895 PetscCall(PetscObjectSetName((PetscObject)natural, ((PetscObject)xin)->name)); 896 PetscCall(PetscObjectSetOptionsPrefix((PetscObject)natural, prefix)); 897 PetscCall(VecLoad(natural, viewer)); 898 PetscCall(DMDANaturalToGlobalBegin(da, natural, INSERT_VALUES, xin)); 899 PetscCall(DMDANaturalToGlobalEnd(da, natural, INSERT_VALUES, xin)); 900 PetscCall(VecDestroy(&natural)); 901 PetscCall(PetscInfo(xin, "Loading vector from natural ordering into DMDA\n")); 902 PetscCall(PetscOptionsGetInt(NULL, ((PetscObject)xin)->prefix, "-vecload_block_size", &bs, &flag)); 903 if (flag && bs != dd->w) { PetscCall(PetscInfo(xin, "Block size in file %" PetscInt_FMT " not equal to DMDA's dof %" PetscInt_FMT "\n", bs, dd->w)); } 904 PetscFunctionReturn(0); 905 } 906 907 PetscErrorCode VecLoad_Default_DA(Vec xin, PetscViewer viewer) { 908 DM da; 909 PetscBool isbinary; 910 #if defined(PETSC_HAVE_HDF5) 911 PetscBool ishdf5; 912 #endif 913 914 PetscFunctionBegin; 915 PetscCall(VecGetDM(xin, &da)); 916 PetscCheck(da, PetscObjectComm((PetscObject)xin), PETSC_ERR_ARG_WRONG, "Vector not generated from a DMDA"); 917 918 #if 
defined(PETSC_HAVE_HDF5) 919 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERHDF5, &ishdf5)); 920 #endif 921 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERBINARY, &isbinary)); 922 923 if (isbinary) { 924 PetscCall(VecLoad_Binary_DA(xin, viewer)); 925 #if defined(PETSC_HAVE_HDF5) 926 } else if (ishdf5) { 927 PetscCall(VecLoad_HDF5_DA(xin, viewer)); 928 #endif 929 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Viewer type %s not supported for vector loading", ((PetscObject)viewer)->type_name); 930 PetscFunctionReturn(0); 931 } 932