1 #define PETSCDM_DLL 2 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 3 4 #if defined(PETSC_HAVE_EXODUSII) 5 #include <netcdf.h> 6 #include <exodusII.h> 7 #endif 8 9 #include <petsc/private/viewerimpl.h> 10 #include <petsc/private/viewerexodusiiimpl.h> 11 #if defined(PETSC_HAVE_EXODUSII) 12 /* 13 PETSC_VIEWER_EXODUSII_ - Creates an ExodusII PetscViewer shared by all processors in a communicator. 14 15 Collective 16 17 Input Parameter: 18 . comm - the MPI communicator to share the ExodusII PetscViewer 19 20 Level: intermediate 21 22 Notes: 23 misses Fortran bindings 24 25 Notes: 26 Unlike almost all other PETSc routines, PETSC_VIEWER_EXODUSII_ does not return 27 an error code. The GLVIS PetscViewer is usually used in the form 28 $ XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm)); 29 30 .seealso: PetscViewerExodusIIOpen(), PetscViewerType, PetscViewerCreate(), PetscViewerDestroy() 31 */ 32 PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm) 33 { 34 PetscViewer viewer; 35 PetscErrorCode ierr; 36 37 PetscFunctionBegin; 38 ierr = PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer); 39 if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);} 40 ierr = PetscObjectRegisterDestroy((PetscObject) viewer); 41 if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);} 42 PetscFunctionReturn(viewer); 43 } 44 45 static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer) 46 { 47 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) v->data; 48 49 PetscFunctionBegin; 50 if (exo->filename) PetscCall(PetscViewerASCIIPrintf(viewer, "Filename: %s\n", exo->filename)); 51 if (exo->exoid) PetscCall(PetscViewerASCIIPrintf(viewer, "exoid: %d\n", exo->exoid)); 52 if (exo->btype) PetscCall(PetscViewerASCIIPrintf(viewer, "IO Mode: %d\n", exo->btype)); 53 if 
(exo->order) PetscCall(PetscViewerASCIIPrintf(viewer, "Mesh order: %d\n", exo->order)); 54 PetscFunctionReturn(0); 55 } 56 57 static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscOptionItems *PetscOptionsObject, PetscViewer v) 58 { 59 PetscFunctionBegin; 60 PetscOptionsHeadBegin(PetscOptionsObject, "ExodusII PetscViewer Options"); 61 PetscOptionsHeadEnd(); 62 PetscFunctionReturn(0); 63 } 64 65 static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer) 66 { 67 PetscFunctionBegin; 68 PetscFunctionReturn(0); 69 } 70 71 static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer) 72 { 73 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 74 75 PetscFunctionBegin; 76 if (exo->exoid >= 0) {PetscStackCallStandard(ex_close,exo->exoid);} 77 PetscCall(PetscFree(exo->filename)); 78 PetscCall(PetscFree(exo)); 79 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetName_C",NULL)); 80 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileGetName_C",NULL)); 81 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetMode_C",NULL)); 82 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetId_C",NULL)); 83 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetOrder_C",NULL)); 84 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerSetOrder_C",NULL)); 85 PetscFunctionReturn(0); 86 } 87 88 static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[]) 89 { 90 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 91 PetscMPIInt rank; 92 int CPU_word_size, IO_word_size, EXO_mode; 93 MPI_Info mpi_info = MPI_INFO_NULL; 94 float EXO_version; 95 96 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank)); 97 CPU_word_size = sizeof(PetscReal); 98 IO_word_size = sizeof(PetscReal); 99 100 PetscFunctionBegin; 101 if (exo->exoid >= 0) { 102 
PetscStackCallStandard(ex_close,exo->exoid); 103 exo->exoid = -1; 104 } 105 if (exo->filename) PetscCall(PetscFree(exo->filename)); 106 PetscCall(PetscStrallocpy(name, &exo->filename)); 107 switch (exo->btype) { 108 case FILE_MODE_READ: 109 EXO_mode = EX_READ; 110 break; 111 case FILE_MODE_APPEND: 112 case FILE_MODE_UPDATE: 113 case FILE_MODE_APPEND_UPDATE: 114 /* Will fail if the file does not already exist */ 115 EXO_mode = EX_WRITE; 116 break; 117 case FILE_MODE_WRITE: 118 /* 119 exodus only allows writing geometry upon file creation, so we will let DMView create the file. 120 */ 121 PetscFunctionReturn(0); 122 break; 123 default: 124 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()"); 125 } 126 #if defined(PETSC_USE_64BIT_INDICES) 127 EXO_mode += EX_ALL_INT64_API; 128 #endif 129 exo->exoid = ex_open_par(name,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,PETSC_COMM_WORLD,mpi_info); 130 PetscCheck(exo->exoid >= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", name); 131 PetscFunctionReturn(0); 132 } 133 134 static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name) 135 { 136 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 137 138 PetscFunctionBegin; 139 *name = exo->filename; 140 PetscFunctionReturn(0); 141 } 142 143 static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type) 144 { 145 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 146 147 PetscFunctionBegin; 148 exo->btype = type; 149 PetscFunctionReturn(0); 150 } 151 152 static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type) 153 { 154 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 155 156 PetscFunctionBegin; 157 *type = exo->btype; 158 PetscFunctionReturn(0); 159 } 160 161 static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid) 162 { 163 
PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 164 165 PetscFunctionBegin; 166 *exoid = exo->exoid; 167 PetscFunctionReturn(0); 168 } 169 170 static PetscErrorCode PetscViewerExodusIIGetOrder_ExodusII(PetscViewer viewer, PetscInt *order) 171 { 172 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 173 174 PetscFunctionBegin; 175 *order = exo->order; 176 PetscFunctionReturn(0); 177 } 178 179 static PetscErrorCode PetscViewerExodusIISetOrder_ExodusII(PetscViewer viewer, PetscInt order) 180 { 181 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 182 183 PetscFunctionBegin; 184 exo->order = order; 185 PetscFunctionReturn(0); 186 } 187 188 /*MC 189 PETSCVIEWEREXODUSII - A viewer that writes to an Exodus II file 190 191 .seealso: PetscViewerExodusIIOpen(), PetscViewerCreate(), PETSCVIEWERBINARY, PETSCVIEWERHDF5, DMView(), 192 PetscViewerFileSetName(), PetscViewerFileSetMode(), PetscViewerFormat, PetscViewerType, PetscViewerSetType() 193 194 Level: beginner 195 M*/ 196 197 PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v) 198 { 199 PetscViewer_ExodusII *exo; 200 201 PetscFunctionBegin; 202 PetscCall(PetscNewLog(v,&exo)); 203 204 v->data = (void*) exo; 205 v->ops->destroy = PetscViewerDestroy_ExodusII; 206 v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII; 207 v->ops->setup = PetscViewerSetUp_ExodusII; 208 v->ops->view = PetscViewerView_ExodusII; 209 v->ops->flush = 0; 210 exo->btype = (PetscFileMode) -1; 211 exo->filename = 0; 212 exo->exoid = -1; 213 214 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetName_C",PetscViewerFileSetName_ExodusII)); 215 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetName_C",PetscViewerFileGetName_ExodusII)); 216 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetMode_C",PetscViewerFileSetMode_ExodusII)); 217 
PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetMode_C",PetscViewerFileGetMode_ExodusII)); 218 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetId_C",PetscViewerExodusIIGetId_ExodusII)); 219 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerSetOrder_C",PetscViewerExodusIISetOrder_ExodusII)); 220 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetOrder_C",PetscViewerExodusIIGetOrder_ExodusII)); 221 PetscFunctionReturn(0); 222 } 223 224 /* 225 EXOGetVarIndex - Locate a result in an exodus file based on its name 226 227 Collective 228 229 Input Parameters: 230 + exoid - the exodus id of a file (obtained from ex_open or ex_create for instance) 231 . obj_type - the type of entity for instance EX_NODAL, EX_ELEM_BLOCK 232 - name - the name of the result 233 234 Output Parameters: 235 . varIndex - the location in the exodus file of the result 236 237 Notes: 238 The exodus variable index is obtained by comparing name and the 239 names of zonal variables declared in the exodus file. For instance if name is "V" 240 the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11" 241 amongst all variables of type obj_type. 
242 243 Level: beginner 244 245 .seealso: EXOGetVarIndex(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadNodal_PlexEXO(),VecLoadZonal_PlexEXO() 246 */ 247 PetscErrorCode EXOGetVarIndex_Internal(int exoid, ex_entity_type obj_type, const char name[], int *varIndex) 248 { 249 int num_vars, i, j; 250 char ext_name[MAX_STR_LENGTH+1], var_name[MAX_STR_LENGTH+1]; 251 const int num_suffix = 5; 252 char *suffix[5]; 253 PetscBool flg; 254 255 PetscFunctionBegin; 256 suffix[0] = (char *) ""; 257 suffix[1] = (char *) "_X"; 258 suffix[2] = (char *) "_XX"; 259 suffix[3] = (char *) "_1"; 260 suffix[4] = (char *) "_11"; 261 262 *varIndex = -1; 263 PetscStackCallStandard(ex_get_variable_param,exoid, obj_type, &num_vars); 264 for (i = 0; i < num_vars; ++i) { 265 PetscStackCallStandard(ex_get_variable_name,exoid, obj_type, i+1, var_name); 266 for (j = 0; j < num_suffix; ++j) { 267 PetscCall(PetscStrncpy(ext_name, name, MAX_STR_LENGTH)); 268 PetscCall(PetscStrlcat(ext_name, suffix[j], MAX_STR_LENGTH)); 269 PetscCall(PetscStrcasecmp(ext_name, var_name, &flg)); 270 if (flg) { 271 *varIndex = i+1; 272 } 273 } 274 } 275 PetscFunctionReturn(0); 276 } 277 278 /* 279 DMView_PlexExodusII - Write a DM to disk in exodus format 280 281 Collective on dm 282 283 Input Parameters: 284 + dm - The dm to be written 285 . viewer - an exodusII viewer 286 287 Notes: 288 Not all DM can be written to disk this way. For instance, exodus assume that element blocks (mapped to "Cell sets" labels) 289 consists of sequentially numbered cells. If this is not the case, the exodus file will be corrupted. 290 291 If the dm has been distributed, only the part of the DM on MPI rank 0 (including "ghost" cells and vertices) 292 will be written. 293 294 DMPlex only represents geometry while most post-processing software expect that a mesh also provides information 295 on the discretization space. This function assumes that the file represents Lagrange finite elements of order 1 or 2. 
296 The order of the mesh shall be set using PetscViewerExodusIISetOrder 297 It should be extended to use PetscFE objects. 298 299 This function will only handle TRI, TET, QUAD, and HEX cells. 300 Level: beginner 301 302 .seealso: 303 */ 304 PetscErrorCode DMView_PlexExodusII(DM dm, PetscViewer viewer) 305 { 306 enum ElemType {TRI, QUAD, TET, HEX}; 307 MPI_Comm comm; 308 PetscInt degree; /* the order of the mesh */ 309 /* Connectivity Variables */ 310 PetscInt cellsNotInConnectivity; 311 /* Cell Sets */ 312 DMLabel csLabel; 313 IS csIS; 314 const PetscInt *csIdx; 315 PetscInt num_cs, cs; 316 enum ElemType *type; 317 PetscBool hasLabel; 318 /* Coordinate Variables */ 319 DM cdm; 320 PetscSection coordSection; 321 Vec coord; 322 PetscInt **nodes; 323 PetscInt depth, d, dim, skipCells = 0; 324 PetscInt pStart, pEnd, p, cStart, cEnd, numCells, vStart, vEnd, numVertices, eStart, eEnd, numEdges, fStart, fEnd, numFaces, numNodes; 325 PetscInt num_vs, num_fs; 326 PetscMPIInt rank, size; 327 const char *dmName; 328 PetscInt nodesTriP1[4] = {3,0,0,0}; 329 PetscInt nodesTriP2[4] = {3,3,0,0}; 330 PetscInt nodesQuadP1[4] = {4,0,0,0}; 331 PetscInt nodesQuadP2[4] = {4,4,0,1}; 332 PetscInt nodesTetP1[4] = {4,0,0,0}; 333 PetscInt nodesTetP2[4] = {4,6,0,0}; 334 PetscInt nodesHexP1[4] = {8,0,0,0}; 335 PetscInt nodesHexP2[4] = {8,12,6,1}; 336 int CPU_word_size, IO_word_size, EXO_mode; 337 float EXO_version; 338 339 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 340 341 PetscFunctionBegin; 342 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); 343 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 344 PetscCallMPI(MPI_Comm_size(comm, &size)); 345 346 /* 347 Creating coordSection is a collective operation so we do it somewhat out of sequence 348 */ 349 PetscCall(PetscSectionCreate(comm, &coordSection)); 350 PetscCall(DMGetCoordinatesLocalSetUp(dm)); 351 if (!rank) { 352 switch (exo->btype) { 353 case FILE_MODE_READ: 354 case FILE_MODE_APPEND: 355 case FILE_MODE_UPDATE: 
356 case FILE_MODE_APPEND_UPDATE: 357 /* exodusII does not allow writing geometry to an existing file */ 358 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "cannot add geometry to existing file %s", exo->filename); 359 case FILE_MODE_WRITE: 360 /* Create an empty file if one already exists*/ 361 EXO_mode = EX_CLOBBER; 362 #if defined(PETSC_USE_64BIT_INDICES) 363 EXO_mode += EX_ALL_INT64_API; 364 #endif 365 CPU_word_size = sizeof(PetscReal); 366 IO_word_size = sizeof(PetscReal); 367 exo->exoid = ex_create(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size); 368 PetscCheck(exo->exoid >= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", exo->filename); 369 370 break; 371 default: 372 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()"); 373 } 374 375 /* --- Get DM info --- */ 376 PetscCall(PetscObjectGetName((PetscObject) dm, &dmName)); 377 PetscCall(DMPlexGetDepth(dm, &depth)); 378 PetscCall(DMGetDimension(dm, &dim)); 379 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); 380 PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); 381 PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd)); 382 PetscCall(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd)); 383 PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); 384 numCells = cEnd - cStart; 385 numEdges = eEnd - eStart; 386 numVertices = vEnd - vStart; 387 if (depth == 3) {numFaces = fEnd - fStart;} 388 else {numFaces = 0;} 389 PetscCall(DMGetLabelSize(dm, "Cell Sets", &num_cs)); 390 PetscCall(DMGetLabelSize(dm, "Vertex Sets", &num_vs)); 391 PetscCall(DMGetLabelSize(dm, "Face Sets", &num_fs)); 392 PetscCall(DMGetCoordinatesLocal(dm, &coord)); 393 PetscCall(DMGetCoordinateDM(dm, &cdm)); 394 if (num_cs > 0) { 395 PetscCall(DMGetLabel(dm, "Cell Sets", &csLabel)); 396 PetscCall(DMLabelGetValueIS(csLabel, &csIS)); 397 PetscCall(ISGetIndices(csIS, &csIdx)); 398 } 399 PetscCall(PetscMalloc1(num_cs, &nodes)); 400 /* Set element type for each block and compute 
total number of nodes */ 401 PetscCall(PetscMalloc1(num_cs, &type)); 402 numNodes = numVertices; 403 404 PetscCall(PetscViewerExodusIIGetOrder(viewer, °ree)); 405 if (degree == 2) {numNodes += numEdges;} 406 cellsNotInConnectivity = numCells; 407 for (cs = 0; cs < num_cs; ++cs) { 408 IS stratumIS; 409 const PetscInt *cells; 410 PetscScalar *xyz = NULL; 411 PetscInt csSize, closureSize; 412 413 PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS)); 414 PetscCall(ISGetIndices(stratumIS, &cells)); 415 PetscCall(ISGetSize(stratumIS, &csSize)); 416 PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz)); 417 switch (dim) { 418 case 2: 419 if (closureSize == 3*dim) {type[cs] = TRI;} 420 else if (closureSize == 4*dim) {type[cs] = QUAD;} 421 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize/dim, dim); 422 break; 423 case 3: 424 if (closureSize == 4*dim) {type[cs] = TET;} 425 else if (closureSize == 8*dim) {type[cs] = HEX;} 426 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize/dim, dim); 427 break; 428 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %" PetscInt_FMT " not handled by ExodusII viewer", dim); 429 } 430 if ((degree == 2) && (type[cs] == QUAD)) {numNodes += csSize;} 431 if ((degree == 2) && (type[cs] == HEX)) {numNodes += csSize; numNodes += numFaces;} 432 PetscCall(DMPlexVecRestoreClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz)); 433 /* Set nodes and Element type */ 434 if (type[cs] == TRI) { 435 if (degree == 1) nodes[cs] = nodesTriP1; 436 else if (degree == 2) nodes[cs] = nodesTriP2; 437 } else if (type[cs] == QUAD) { 438 if (degree == 1) nodes[cs] = nodesQuadP1; 439 else if (degree == 2) nodes[cs] = nodesQuadP2; 440 } else if (type[cs] == TET) { 441 if (degree == 1) nodes[cs] = 
nodesTetP1; 442 else if (degree == 2) nodes[cs] = nodesTetP2; 443 } else if (type[cs] == HEX) { 444 if (degree == 1) nodes[cs] = nodesHexP1; 445 else if (degree == 2) nodes[cs] = nodesHexP2; 446 } 447 /* Compute the number of cells not in the connectivity table */ 448 cellsNotInConnectivity -= nodes[cs][3]*csSize; 449 450 PetscCall(ISRestoreIndices(stratumIS, &cells)); 451 PetscCall(ISDestroy(&stratumIS)); 452 } 453 if (num_cs > 0) {PetscStackCallStandard(ex_put_init,exo->exoid, dmName, dim, numNodes, numCells, num_cs, num_vs, num_fs);} 454 /* --- Connectivity --- */ 455 for (cs = 0; cs < num_cs; ++cs) { 456 IS stratumIS; 457 const PetscInt *cells; 458 PetscInt *connect, off = 0; 459 PetscInt edgesInClosure = 0, facesInClosure = 0, verticesInClosure = 0; 460 PetscInt csSize, c, connectSize, closureSize; 461 char *elem_type = NULL; 462 char elem_type_tri3[] = "TRI3", elem_type_quad4[] = "QUAD4"; 463 char elem_type_tri6[] = "TRI6", elem_type_quad9[] = "QUAD9"; 464 char elem_type_tet4[] = "TET4", elem_type_hex8[] = "HEX8"; 465 char elem_type_tet10[] = "TET10", elem_type_hex27[] = "HEX27"; 466 467 PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS)); 468 PetscCall(ISGetIndices(stratumIS, &cells)); 469 PetscCall(ISGetSize(stratumIS, &csSize)); 470 /* Set Element type */ 471 if (type[cs] == TRI) { 472 if (degree == 1) elem_type = elem_type_tri3; 473 else if (degree == 2) elem_type = elem_type_tri6; 474 } else if (type[cs] == QUAD) { 475 if (degree == 1) elem_type = elem_type_quad4; 476 else if (degree == 2) elem_type = elem_type_quad9; 477 } else if (type[cs] == TET) { 478 if (degree == 1) elem_type = elem_type_tet4; 479 else if (degree == 2) elem_type = elem_type_tet10; 480 } else if (type[cs] == HEX) { 481 if (degree == 1) elem_type = elem_type_hex8; 482 else if (degree == 2) elem_type = elem_type_hex27; 483 } 484 connectSize = nodes[cs][0] + nodes[cs][1] + nodes[cs][2] + nodes[cs][3]; 485 PetscCall(PetscMalloc1(PetscMax(27,connectSize)*csSize, &connect)); 
486 PetscStackCallStandard(ex_put_block,exo->exoid, EX_ELEM_BLOCK, csIdx[cs], elem_type, csSize, connectSize, 0, 0, 1); 487 /* Find number of vertices, edges, and faces in the closure */ 488 verticesInClosure = nodes[cs][0]; 489 if (depth > 1) { 490 if (dim == 2) { 491 PetscCall(DMPlexGetConeSize(dm, cells[0], &edgesInClosure)); 492 } else if (dim == 3) { 493 PetscInt *closure = NULL; 494 495 PetscCall(DMPlexGetConeSize(dm, cells[0], &facesInClosure)); 496 PetscCall(DMPlexGetTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure)); 497 edgesInClosure = closureSize - facesInClosure - 1 - verticesInClosure; 498 PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure)); 499 } 500 } 501 /* Get connectivity for each cell */ 502 for (c = 0; c < csSize; ++c) { 503 PetscInt *closure = NULL; 504 PetscInt temp, i; 505 506 PetscCall(DMPlexGetTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure)); 507 for (i = 0; i < connectSize; ++i) { 508 if (i < nodes[cs][0]) {/* Vertices */ 509 connect[i+off] = closure[(i+edgesInClosure+facesInClosure+1)*2] + 1; 510 connect[i+off] -= cellsNotInConnectivity; 511 } else if (i < nodes[cs][0]+nodes[cs][1]) { /* Edges */ 512 connect[i+off] = closure[(i-verticesInClosure+facesInClosure+1)*2] + 1; 513 if (nodes[cs][2] == 0) connect[i+off] -= numFaces; 514 connect[i+off] -= cellsNotInConnectivity; 515 } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]) { /* Cells */ 516 connect[i+off] = closure[0] + 1; 517 connect[i+off] -= skipCells; 518 } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]+nodes[cs][2]) { /* Faces */ 519 connect[i+off] = closure[(i-edgesInClosure-verticesInClosure)*2] + 1; 520 connect[i+off] -= cellsNotInConnectivity; 521 } else { 522 connect[i+off] = -1; 523 } 524 } 525 /* Tetrahedra are inverted */ 526 if (type[cs] == TET) { 527 temp = connect[0+off]; connect[0+off] = connect[1+off]; connect[1+off] = temp; 528 if (degree == 2) { 529 temp = connect[5+off]; 
connect[5+off] = connect[6+off]; connect[6+off] = temp; 530 temp = connect[7+off]; connect[7+off] = connect[8+off]; connect[8+off] = temp; 531 } 532 } 533 /* Hexahedra are inverted */ 534 if (type[cs] == HEX) { 535 temp = connect[1+off]; connect[1+off] = connect[3+off]; connect[3+off] = temp; 536 if (degree == 2) { 537 temp = connect[8+off]; connect[8+off] = connect[11+off]; connect[11+off] = temp; 538 temp = connect[9+off]; connect[9+off] = connect[10+off]; connect[10+off] = temp; 539 temp = connect[16+off]; connect[16+off] = connect[17+off]; connect[17+off] = temp; 540 temp = connect[18+off]; connect[18+off] = connect[19+off]; connect[19+off] = temp; 541 542 temp = connect[12+off]; connect[12+off] = connect[16+off]; connect[16+off] = temp; 543 temp = connect[13+off]; connect[13+off] = connect[17+off]; connect[17+off] = temp; 544 temp = connect[14+off]; connect[14+off] = connect[18+off]; connect[18+off] = temp; 545 temp = connect[15+off]; connect[15+off] = connect[19+off]; connect[19+off] = temp; 546 547 temp = connect[23+off]; connect[23+off] = connect[26+off]; connect[26+off] = temp; 548 temp = connect[24+off]; connect[24+off] = connect[25+off]; connect[25+off] = temp; 549 temp = connect[25+off]; connect[25+off] = connect[26+off]; connect[26+off] = temp; 550 } 551 } 552 off += connectSize; 553 PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure)); 554 } 555 PetscStackCallStandard(ex_put_conn,exo->exoid, EX_ELEM_BLOCK, csIdx[cs], connect, 0, 0); 556 skipCells += (nodes[cs][3] == 0)*csSize; 557 PetscCall(PetscFree(connect)); 558 PetscCall(ISRestoreIndices(stratumIS, &cells)); 559 PetscCall(ISDestroy(&stratumIS)); 560 } 561 PetscCall(PetscFree(type)); 562 /* --- Coordinates --- */ 563 PetscCall(PetscSectionSetChart(coordSection, pStart, pEnd)); 564 if (num_cs) { 565 for (d = 0; d < depth; ++d) { 566 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd)); 567 for (p = pStart; p < pEnd; ++p) { 568 
PetscCall(PetscSectionSetDof(coordSection, p, nodes[0][d] > 0)); 569 } 570 } 571 } 572 for (cs = 0; cs < num_cs; ++cs) { 573 IS stratumIS; 574 const PetscInt *cells; 575 PetscInt csSize, c; 576 577 PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS)); 578 PetscCall(ISGetIndices(stratumIS, &cells)); 579 PetscCall(ISGetSize(stratumIS, &csSize)); 580 for (c = 0; c < csSize; ++c) { 581 PetscCall(PetscSectionSetDof(coordSection, cells[c], nodes[cs][3] > 0)); 582 } 583 PetscCall(ISRestoreIndices(stratumIS, &cells)); 584 PetscCall(ISDestroy(&stratumIS)); 585 } 586 if (num_cs > 0) { 587 PetscCall(ISRestoreIndices(csIS, &csIdx)); 588 PetscCall(ISDestroy(&csIS)); 589 } 590 PetscCall(PetscFree(nodes)); 591 PetscCall(PetscSectionSetUp(coordSection)); 592 if (numNodes > 0) { 593 const char *coordNames[3] = {"x", "y", "z"}; 594 PetscScalar *closure, *cval; 595 PetscReal *coords; 596 PetscInt hasDof, n = 0; 597 598 /* There can't be more than 24 values in the closure of a point for the coord coordSection */ 599 PetscCall(PetscCalloc3(numNodes*3, &coords, dim, &cval, 24, &closure)); 600 PetscCall(DMGetCoordinatesLocalNoncollective(dm, &coord)); 601 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); 602 for (p = pStart; p < pEnd; ++p) { 603 PetscCall(PetscSectionGetDof(coordSection, p, &hasDof)); 604 if (hasDof) { 605 PetscInt closureSize = 24, j; 606 607 PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, p, &closureSize, &closure)); 608 for (d = 0; d < dim; ++d) { 609 cval[d] = 0.0; 610 for (j = 0; j < closureSize/dim; j++) cval[d] += closure[j*dim+d]; 611 coords[d*numNodes+n] = PetscRealPart(cval[d]) * dim / closureSize; 612 } 613 ++n; 614 } 615 } 616 PetscStackCallStandard(ex_put_coord,exo->exoid, &coords[0*numNodes], &coords[1*numNodes], &coords[2*numNodes]); 617 PetscCall(PetscFree3(coords, cval, closure)); 618 PetscStackCallStandard(ex_put_coord_names,exo->exoid, (char **) coordNames); 619 } 620 621 /* --- Node Sets/Vertex Sets --- */ 622 DMHasLabel(dm, "Vertex Sets", 
&hasLabel); 623 if (hasLabel) { 624 PetscInt i, vs, vsSize; 625 const PetscInt *vsIdx, *vertices; 626 PetscInt *nodeList; 627 IS vsIS, stratumIS; 628 DMLabel vsLabel; 629 PetscCall(DMGetLabel(dm, "Vertex Sets", &vsLabel)); 630 PetscCall(DMLabelGetValueIS(vsLabel, &vsIS)); 631 PetscCall(ISGetIndices(vsIS, &vsIdx)); 632 for (vs=0; vs<num_vs; ++vs) { 633 PetscCall(DMLabelGetStratumIS(vsLabel, vsIdx[vs], &stratumIS)); 634 PetscCall(ISGetIndices(stratumIS, &vertices)); 635 PetscCall(ISGetSize(stratumIS, &vsSize)); 636 PetscCall(PetscMalloc1(vsSize, &nodeList)); 637 for (i=0; i<vsSize; ++i) { 638 nodeList[i] = vertices[i] - skipCells + 1; 639 } 640 PetscStackCallStandard(ex_put_set_param,exo->exoid, EX_NODE_SET, vsIdx[vs], vsSize, 0); 641 PetscStackCallStandard(ex_put_set,exo->exoid, EX_NODE_SET, vsIdx[vs], nodeList, NULL); 642 PetscCall(ISRestoreIndices(stratumIS, &vertices)); 643 PetscCall(ISDestroy(&stratumIS)); 644 PetscCall(PetscFree(nodeList)); 645 } 646 PetscCall(ISRestoreIndices(vsIS, &vsIdx)); 647 PetscCall(ISDestroy(&vsIS)); 648 } 649 /* --- Side Sets/Face Sets --- */ 650 PetscCall(DMHasLabel(dm, "Face Sets", &hasLabel)); 651 if (hasLabel) { 652 PetscInt i, j, fs, fsSize; 653 const PetscInt *fsIdx, *faces; 654 IS fsIS, stratumIS; 655 DMLabel fsLabel; 656 PetscInt numPoints, *points; 657 PetscInt elem_list_size = 0; 658 PetscInt *elem_list, *elem_ind, *side_list; 659 660 PetscCall(DMGetLabel(dm, "Face Sets", &fsLabel)); 661 /* Compute size of Node List and Element List */ 662 PetscCall(DMLabelGetValueIS(fsLabel, &fsIS)); 663 PetscCall(ISGetIndices(fsIS, &fsIdx)); 664 for (fs=0; fs<num_fs; ++fs) { 665 PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS)); 666 PetscCall(ISGetSize(stratumIS, &fsSize)); 667 elem_list_size += fsSize; 668 PetscCall(ISDestroy(&stratumIS)); 669 } 670 PetscCall(PetscMalloc3(num_fs, &elem_ind,elem_list_size, &elem_list,elem_list_size, &side_list)); 671 elem_ind[0] = 0; 672 for (fs=0; fs<num_fs; ++fs) { 673 
PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS)); 674 PetscCall(ISGetIndices(stratumIS, &faces)); 675 PetscCall(ISGetSize(stratumIS, &fsSize)); 676 /* Set Parameters */ 677 PetscStackCallStandard(ex_put_set_param,exo->exoid, EX_SIDE_SET, fsIdx[fs], fsSize, 0); 678 /* Indices */ 679 if (fs<num_fs-1) { 680 elem_ind[fs+1] = elem_ind[fs] + fsSize; 681 } 682 683 for (i=0; i<fsSize; ++i) { 684 /* Element List */ 685 points = NULL; 686 PetscCall(DMPlexGetTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points)); 687 elem_list[elem_ind[fs] + i] = points[2] +1; 688 PetscCall(DMPlexRestoreTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points)); 689 690 /* Side List */ 691 points = NULL; 692 PetscCall(DMPlexGetTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points)); 693 for (j=1; j<numPoints; ++j) { 694 if (points[j*2]==faces[i]) {break;} 695 } 696 /* Convert HEX sides */ 697 if (numPoints == 27) { 698 if (j == 1) {j = 5;} 699 else if (j == 2) {j = 6;} 700 else if (j == 3) {j = 1;} 701 else if (j == 4) {j = 3;} 702 else if (j == 5) {j = 2;} 703 else if (j == 6) {j = 4;} 704 } 705 /* Convert TET sides */ 706 if (numPoints == 15) { 707 --j; 708 if (j == 0) {j = 4;} 709 } 710 side_list[elem_ind[fs] + i] = j; 711 PetscCall(DMPlexRestoreTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points)); 712 713 } 714 PetscCall(ISRestoreIndices(stratumIS, &faces)); 715 PetscCall(ISDestroy(&stratumIS)); 716 } 717 PetscCall(ISRestoreIndices(fsIS, &fsIdx)); 718 PetscCall(ISDestroy(&fsIS)); 719 720 /* Put side sets */ 721 for (fs=0; fs<num_fs; ++fs) { 722 PetscStackCallStandard(ex_put_set,exo->exoid, EX_SIDE_SET, fsIdx[fs], &elem_list[elem_ind[fs]], &side_list[elem_ind[fs]]); 723 } 724 PetscCall(PetscFree3(elem_ind,elem_list,side_list)); 725 } 726 /* 727 close the exodus file 728 */ 729 ex_close(exo->exoid); 730 exo->exoid = -1; 731 } 732 PetscCall(PetscSectionDestroy(&coordSection)); 733 734 /* 735 
reopen the file in parallel 736 */ 737 EXO_mode = EX_WRITE; 738 #if defined(PETSC_USE_64BIT_INDICES) 739 EXO_mode += EX_ALL_INT64_API; 740 #endif 741 CPU_word_size = sizeof(PetscReal); 742 IO_word_size = sizeof(PetscReal); 743 exo->exoid = ex_open_par(exo->filename,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,comm,MPI_INFO_NULL); 744 PetscCheck(exo->exoid >= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", exo->filename); 745 PetscFunctionReturn(0); 746 } 747 748 /* 749 VecView_PlexExodusII_Internal - Write a Vec corresponding in an exodus file 750 751 Collective on v 752 753 Input Parameters: 754 + v - The vector to be written 755 - viewer - The PetscViewerExodusII viewer associate to an exodus file 756 757 Notes: 758 The exodus result variable index is obtained by comparing the Vec name and the 759 names of variables declared in the exodus file. For instance for a Vec named "V" 760 the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11" 761 amongst all variables. 
762 In the event where a nodal and zonal variable both match, the function will return an error instead of 763 possibly corrupting the file 764 765 Level: beginner 766 767 .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII() 768 @*/ 769 PetscErrorCode VecView_PlexExodusII_Internal(Vec v, PetscViewer viewer) 770 { 771 DM dm; 772 MPI_Comm comm; 773 PetscMPIInt rank; 774 775 int exoid,offsetN = 0, offsetZ = 0; 776 const char *vecname; 777 PetscInt step; 778 779 PetscFunctionBegin; 780 PetscCall(PetscObjectGetComm((PetscObject) v, &comm)); 781 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 782 PetscCall(PetscViewerExodusIIGetId(viewer,&exoid)); 783 PetscCall(VecGetDM(v, &dm)); 784 PetscCall(PetscObjectGetName((PetscObject) v, &vecname)); 785 786 PetscCall(DMGetOutputSequenceNumber(dm,&step,NULL)); 787 PetscCall(EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN)); 788 PetscCall(EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ)); 789 PetscCheckFalse(offsetN <= 0 && offsetZ <= 0,comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname); 790 if (offsetN > 0) { 791 PetscCall(VecViewPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN)); 792 } else if (offsetZ > 0) { 793 PetscCall(VecViewPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ)); 794 } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname); 795 PetscFunctionReturn(0); 796 } 797 798 /* 799 VecLoad_PlexExodusII_Internal - Write a Vec corresponding in an exodus file 800 801 Collective on v 802 803 Input Parameters: 804 + v - The vector to be written 805 - viewer - The PetscViewerExodusII viewer associate to an exodus file 806 807 Notes: 808 The exodus result variable index is obtained by comparing the Vec name and the 809 names of variables declared in the exodus file. 
For instance for a Vec named "V" 810 the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11" 811 amongst all variables. 812 In the event where a nodal and zonal variable both match, the function will return an error instead of 813 possibly corrupting the file 814 815 Level: beginner 816 817 .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII() 818 @*/ 819 PetscErrorCode VecLoad_PlexExodusII_Internal(Vec v, PetscViewer viewer) 820 { 821 DM dm; 822 MPI_Comm comm; 823 PetscMPIInt rank; 824 825 int exoid,offsetN = 0, offsetZ = 0; 826 const char *vecname; 827 PetscInt step; 828 829 PetscFunctionBegin; 830 PetscCall(PetscObjectGetComm((PetscObject) v, &comm)); 831 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 832 PetscCall(PetscViewerExodusIIGetId(viewer,&exoid)); 833 PetscCall(VecGetDM(v, &dm)); 834 PetscCall(PetscObjectGetName((PetscObject) v, &vecname)); 835 836 PetscCall(DMGetOutputSequenceNumber(dm,&step,NULL)); 837 PetscCall(EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN)); 838 PetscCall(EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ)); 839 PetscCheckFalse(offsetN <= 0 && offsetZ <= 0,comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname); 840 if (offsetN > 0) { 841 PetscCall(VecLoadPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN)); 842 } else if (offsetZ > 0) { 843 PetscCall(VecLoadPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ)); 844 } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname); 845 PetscFunctionReturn(0); 846 } 847 848 /* 849 VecViewPlex_ExodusII_Nodal_Internal - Write a Vec corresponding to a nodal field to an exodus file 850 851 Collective on v 852 853 Input Parameters: 854 + v - The vector to be written 855 . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance) 856 . 
step  - the time step to write at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result nodal variable index is obtained by comparing the Vec name and the
  names of nodal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all nodal variables.

  Level: beginner

.seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecLoadNodal_PlexEXO(),VecViewZonal_PlexEXO(),VecLoadZonal_PlexEXO()
@*/
PetscErrorCode VecViewPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm           comm;
  PetscMPIInt        size;
  DM                 dm;
  Vec                vNatural, comp;
  const PetscScalar *vals;
  PetscInt           xs, xe, bs;
  PetscBool          useNatural;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) v, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v, &dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only differs from the global ordering in parallel */
  if (size == 1) useNatural = PETSC_FALSE;
  if (useNatural) {
    PetscCall(DMGetGlobalVector(dm, &vNatural));
    PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
    PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
  } else {
    vNatural = v;
  }

  /* Write the locally owned slice into the exodus file.
     Exodus stores each component of a vector-valued field as a separate variable;
     we assume the components are stored sequentially. */
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs == 1) {
    PetscCall(VecGetArrayRead(vNatural, &vals));
    PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, vals);
    PetscCall(VecRestoreArrayRead(vNatural, &vals));
  } else {
    IS       strideIS;
    PetscInt d;

    /* One strided sub-vector per component; the stride is reset for each component d */
    PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &strideIS));
    for (d = 0; d < bs; ++d) {
      PetscCall(ISStrideSetStride(strideIS, (xe-xs)/bs, xs+d, bs));
      PetscCall(VecGetSubVector(vNatural, strideIS, &comp));
      PetscCall(VecGetArrayRead(comp, &vals));
      PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_NODAL, offset+d, 1, xs/bs+1, (xe-xs)/bs, vals);
      PetscCall(VecRestoreArrayRead(comp, &vals));
      PetscCall(VecRestoreSubVector(vNatural, strideIS, &comp));
    }
    PetscCall(ISDestroy(&strideIS));
  }
  if (useNatural) PetscCall(DMRestoreGlobalVector(dm, &vNatural));
  PetscFunctionReturn(0);
}

/*
  VecLoadPlex_ExodusII_Nodal_Internal - Read a Vec corresponding to a nodal field from an exodus file

  Collective on v

  Input Parameters:
+ v      - The vector to be read
. exoid  - the exodus id of a file (obtained from ex_open or ex_create for instance)
. step   - the time step to read at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result nodal variable index is obtained by comparing the Vec name and the
  names of nodal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all nodal variables.
937 938 Level: beginner 939 940 .seealso: EXOGetVarIndex_Internal(), DMPlexView_ExodusII_Internal(), VecViewPlex_ExodusII_Nodal_Internal(), VecViewZonal_PlexEXO(), VecLoadZonal_PlexEXO() 941 */ 942 PetscErrorCode VecLoadPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset) 943 { 944 MPI_Comm comm; 945 PetscMPIInt size; 946 DM dm; 947 Vec vNatural, vComp; 948 PetscScalar *varray; 949 PetscInt xs, xe, bs; 950 PetscBool useNatural; 951 952 PetscFunctionBegin; 953 PetscCall(PetscObjectGetComm((PetscObject) v, &comm)); 954 PetscCallMPI(MPI_Comm_size(comm, &size)); 955 PetscCall(VecGetDM(v,&dm)); 956 PetscCall(DMGetUseNatural(dm, &useNatural)); 957 useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE; 958 if (useNatural) PetscCall(DMGetGlobalVector(dm,&vNatural)); 959 else {vNatural = v;} 960 961 /* Read local chunk from the file */ 962 PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe)); 963 PetscCall(VecGetBlockSize(vNatural, &bs)); 964 if (bs == 1) { 965 PetscCall(VecGetArray(vNatural, &varray)); 966 PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray); 967 PetscCall(VecRestoreArray(vNatural, &varray)); 968 } else { 969 IS compIS; 970 PetscInt c; 971 972 PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS)); 973 for (c = 0; c < bs; ++c) { 974 PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs)); 975 PetscCall(VecGetSubVector(vNatural, compIS, &vComp)); 976 PetscCall(VecGetArray(vComp, &varray)); 977 PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray); 978 PetscCall(VecRestoreArray(vComp, &varray)); 979 PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp)); 980 } 981 PetscCall(ISDestroy(&compIS)); 982 } 983 if (useNatural) { 984 PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v)); 985 PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v)); 986 PetscCall(DMRestoreGlobalVector(dm, &vNatural)); 987 } 988 PetscFunctionReturn(0); 989 } 

/*
  VecViewPlex_ExodusII_Zonal_Internal - Write a Vec corresponding to a zonal (cell based) field to an exodus file

  Collective on v

  Input Parameters:
+ v      - The vector to be written
. exoid  - the exodus id of a file (obtained from ex_open or ex_create for instance)
. step   - the time step to write at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result zonal variable index is obtained by comparing the Vec name and the
  names of zonal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all zonal variables.

  Level: beginner

.seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Zonal_Internal()
*/
PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm           comm;
  PetscMPIInt        size;
  DM                 dm;
  Vec                vNatural, vComp;
  const PetscScalar *varray;
  PetscInt           xs, xe, bs;
  PetscBool          useNatural;
  IS                 compIS;
  PetscInt          *csSize, *csID;        /* per element block: entry count and exodus block id */
  PetscInt           numCS, set, csxs = 0; /* csxs: running offset of the first cell of the current block */

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v, &dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only matters in parallel */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) {
    PetscCall(DMGetGlobalVector(dm, &vNatural));
    PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
    PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
  } else {
    vNatural = v;
  }

  /* Write local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially
     Zonal variables are accessed one element block at a time, so we loop through the cell sets,
     but once the vector has been reordered to natural size, we cannot use the label information
     to figure out what to save where. */
  numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
  PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
  PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, csID);
  /* Query the size of each element block so we can compute global cell offsets */
  for (set = 0; set < numCS; ++set) {
    ex_block block;

    block.id = csID[set];
    block.type = EX_ELEM_BLOCK;
    PetscStackCallStandard(ex_get_block_param,exoid, &block);
    csSize[set] = block.num_entry;
  }
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs > 1) PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
  for (set = 0; set < numCS; set++) {
    PetscInt csLocalSize, c;

    /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
       local slice of zonal values: xs/bs,xm/bs-1
       intersection: max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
    csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
    if (bs == 1) {
      PetscCall(VecGetArrayRead(vNatural, &varray));
      PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]);
      PetscCall(VecRestoreArrayRead(vNatural, &varray));
    } else {
      /* One strided sub-vector per component of the block-sized field */
      for (c = 0; c < bs; ++c) {
        PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
        PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
        PetscCall(VecGetArrayRead(vComp, &varray));
        PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]);
        PetscCall(VecRestoreArrayRead(vComp, &varray));
        PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
      }
    }
    csxs += csSize[set]; /* advance to the first cell of the next element block */
  }
  PetscCall(PetscFree2(csID, csSize));
  if (bs > 1) PetscCall(ISDestroy(&compIS));
  if (useNatural) PetscCall(DMRestoreGlobalVector(dm,&vNatural));
  PetscFunctionReturn(0);
}

/*
  VecLoadPlex_ExodusII_Zonal_Internal - Read a Vec corresponding to a zonal (cell based) field from an exodus file

  Collective on v

  Input Parameters:
+ v      - The vector to be read
. exoid  - the exodus id of a file (obtained from ex_open or ex_create for instance)
. step   - the time step to read at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result zonal variable index is obtained by comparing the Vec name and the
  names of zonal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all zonal variables.

  Level: beginner

.seealso: EXOGetVarIndex_Internal(), DMPlexView_ExodusII_Internal(), VecViewPlex_ExodusII_Nodal_Internal(), VecLoadPlex_ExodusII_Nodal_Internal(), VecLoadPlex_ExodusII_Zonal_Internal()
*/
PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm     comm;
  PetscMPIInt  size;
  DM           dm;
  Vec          vNatural, vComp;
  PetscScalar *varray;
  PetscInt     xs, xe, bs;
  PetscBool    useNatural;
  IS           compIS;
  PetscInt    *csSize, *csID;        /* per element block: entry count and exodus block id */
  PetscInt     numCS, set, csxs = 0; /* csxs: running offset of the first cell of the current block */

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)v,&comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v, &dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only matters in parallel */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) PetscCall(DMGetGlobalVector(dm,&vNatural));
  else {vNatural = v;}

  /* Read local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially
     Zonal variables are accessed one element block at a time, so we loop through the cell sets,
     but once the vector has been reordered to natural size, we cannot use the label information
     to figure out what to save where. */
  numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
  PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
  PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, csID);
  /* Query the size of each element block so we can compute global cell offsets */
  for (set = 0; set < numCS; ++set) {
    ex_block block;

    block.id = csID[set];
    block.type = EX_ELEM_BLOCK;
    PetscStackCallStandard(ex_get_block_param,exoid, &block);
    csSize[set] = block.num_entry;
  }
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs > 1) PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
  for (set = 0; set < numCS; ++set) {
    PetscInt csLocalSize, c;

    /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
       local slice of zonal values: xs/bs,xm/bs-1
       intersection: max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
    csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
    if (bs == 1) {
      PetscCall(VecGetArray(vNatural, &varray));
      PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]);
      PetscCall(VecRestoreArray(vNatural, &varray));
    } else {
      /* One strided sub-vector per component of the block-sized field */
      for (c = 0; c < bs; ++c) {
        PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
        PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
        PetscCall(VecGetArray(vComp, &varray));
        PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]);
        PetscCall(VecRestoreArray(vComp, &varray));
        PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
      }
    }
    csxs += csSize[set]; /* advance to the first cell of the next element block */
  }
  PetscCall(PetscFree2(csID, csSize));
  if (bs > 1) PetscCall(ISDestroy(&compIS));
  if (useNatural) {
    /* Map the data back from file (natural) ordering to the PETSc global ordering */
    PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
    PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
    PetscCall(DMRestoreGlobalVector(dm, &vNatural));
  }
  PetscFunctionReturn(0);
}
#endif

/*@
  PetscViewerExodusIIGetId - Get the file id of the ExodusII file

  Logically Collective on PetscViewer

  Input Parameter:
. viewer - the PetscViewer

  Output Parameter:
. exoid - The ExodusII file id

  Level: intermediate

.seealso: PetscViewerFileSetMode(), PetscViewerCreate(), PetscViewerSetType(), PetscViewerBinaryOpen()
@*/
PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Dispatches to the implementation composed on the viewer; a no-op for non-ExodusII viewers */
  PetscTryMethod(viewer, "PetscViewerGetId_C",(PetscViewer,int*),(viewer,exoid));
  PetscFunctionReturn(0);
}

/*@
  PetscViewerExodusIISetOrder - Set the elements order in the exodusII file.

  Collective

  Input Parameters:
+ viewer - the viewer
- order  - elements order

  Level: beginner

.seealso: PetscViewerExodusIIGetId(), PetscViewerExodusIIGetOrder()
@*/
PetscErrorCode PetscViewerExodusIISetOrder(PetscViewer viewer, PetscInt order)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Dispatches to the implementation composed on the viewer; a no-op for non-ExodusII viewers */
  PetscTryMethod(viewer, "PetscViewerSetOrder_C",(PetscViewer,PetscInt),(viewer,order));
  PetscFunctionReturn(0);
}

/*@
  PetscViewerExodusIIGetOrder - Get the elements order in the exodusII file.

  Collective

  Input Parameter:
. viewer - the viewer

  Output Parameter:
. order - elements order

  Level: beginner

.seealso: PetscViewerExodusIIGetId(), PetscViewerExodusIISetOrder()
@*/
PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Dispatches to the implementation composed on the viewer; a no-op for non-ExodusII viewers */
  PetscTryMethod(viewer, "PetscViewerGetOrder_C",(PetscViewer,PetscInt*),(viewer,order));
  PetscFunctionReturn(0);
}

/*@C
  PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.

  Collective

  Input Parameters:
+ comm - MPI communicator
. name - name of file
- type - type of file
$    FILE_MODE_WRITE - create new file for binary output
$    FILE_MODE_READ - open existing file for binary input
$    FILE_MODE_APPEND - open existing file for binary output

  Output Parameter:
. exo - PetscViewer for Exodus II input/output to use with the specified file

  Level: beginner

  Note:
  This PetscViewer should be destroyed with PetscViewerDestroy().

.seealso: PetscViewerPushFormat(), PetscViewerDestroy(),
          DMLoad(), PetscFileMode, PetscViewer, PetscViewerSetType(), PetscViewerFileSetMode(), PetscViewerFileSetName()
@*/
PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
{
  PetscFunctionBegin;
  /* Standard create/configure sequence; PetscViewerFileSetName() performs the actual ex_open/ex_create */
  PetscCall(PetscViewerCreate(comm, exo));
  PetscCall(PetscViewerSetType(*exo, PETSCVIEWEREXODUSII));
  PetscCall(PetscViewerFileSetMode(*exo, type));
  PetscCall(PetscViewerFileSetName(*exo, name));
  PetscCall(PetscViewerSetFromOptions(*exo));
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateExodusFromFile - Create a DMPlex mesh from an ExodusII file.
1295 1296 Collective 1297 1298 Input Parameters: 1299 + comm - The MPI communicator 1300 . filename - The name of the ExodusII file 1301 - interpolate - Create faces and edges in the mesh 1302 1303 Output Parameter: 1304 . dm - The DM object representing the mesh 1305 1306 Level: beginner 1307 1308 .seealso: DMPLEX, DMCreate(), DMPlexCreateExodus() 1309 @*/ 1310 PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm) 1311 { 1312 PetscMPIInt rank; 1313 #if defined(PETSC_HAVE_EXODUSII) 1314 int CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1; 1315 float version; 1316 #endif 1317 1318 PetscFunctionBegin; 1319 PetscValidCharPointer(filename, 2); 1320 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 1321 #if defined(PETSC_HAVE_EXODUSII) 1322 if (rank == 0) { 1323 exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version); 1324 PetscCheck(exoid > 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", filename); 1325 } 1326 PetscCall(DMPlexCreateExodus(comm, exoid, interpolate, dm)); 1327 if (rank == 0) {PetscStackCallStandard(ex_close,exoid);} 1328 PetscFunctionReturn(0); 1329 #else 1330 SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. 
Reconfigure using --download-exodusii"); 1331 #endif 1332 } 1333 1334 #if defined(PETSC_HAVE_EXODUSII) 1335 static PetscErrorCode ExodusGetCellType_Internal(const char *elem_type, DMPolytopeType *ct) 1336 { 1337 PetscBool flg; 1338 1339 PetscFunctionBegin; 1340 *ct = DM_POLYTOPE_UNKNOWN; 1341 PetscCall(PetscStrcmp(elem_type, "TRI", &flg)); 1342 if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;} 1343 PetscCall(PetscStrcmp(elem_type, "TRI3", &flg)); 1344 if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;} 1345 PetscCall(PetscStrcmp(elem_type, "QUAD", &flg)); 1346 if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;} 1347 PetscCall(PetscStrcmp(elem_type, "QUAD4", &flg)); 1348 if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;} 1349 PetscCall(PetscStrcmp(elem_type, "SHELL4", &flg)); 1350 if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;} 1351 PetscCall(PetscStrcmp(elem_type, "TETRA", &flg)); 1352 if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;} 1353 PetscCall(PetscStrcmp(elem_type, "TET4", &flg)); 1354 if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;} 1355 PetscCall(PetscStrcmp(elem_type, "WEDGE", &flg)); 1356 if (flg) {*ct = DM_POLYTOPE_TRI_PRISM; goto done;} 1357 PetscCall(PetscStrcmp(elem_type, "HEX", &flg)); 1358 if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;} 1359 PetscCall(PetscStrcmp(elem_type, "HEX8", &flg)); 1360 if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;} 1361 PetscCall(PetscStrcmp(elem_type, "HEXAHEDRON", &flg)); 1362 if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;} 1363 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unrecognized element type %s", elem_type); 1364 done: 1365 PetscFunctionReturn(0); 1366 } 1367 #endif 1368 1369 /*@ 1370 DMPlexCreateExodus - Create a DMPlex mesh from an ExodusII file ID. 1371 1372 Collective 1373 1374 Input Parameters: 1375 + comm - The MPI communicator 1376 . 
exoid - The ExodusII id associated with a exodus file and obtained using ex_open
- interpolate - Create faces and edges in the mesh

  Output Parameter:
. dm - The DM object representing the mesh

  Level: beginner

.seealso: DMPLEX, DMCreate()
@*/
PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool interpolate, DM *dm)
{
#if defined(PETSC_HAVE_EXODUSII)
  PetscMPIInt num_proc, rank;
  DMLabel cellSets = NULL, faceSets = NULL, vertSets = NULL;
  PetscSection coordSection;
  Vec coordinates;
  PetscScalar *coords;
  PetscInt coordSize, v;
  /* Read from ex_get_init() */
  char title[PETSC_MAX_PATH_LEN+1];
  int dim = 0, dimEmbed = 0, numVertices = 0, numCells = 0;
  int num_cs = 0, num_vs = 0, num_fs = 0;
#endif

  PetscFunctionBegin;
#if defined(PETSC_HAVE_EXODUSII)
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &num_proc));
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  /* Open EXODUS II file and read basic information on rank 0, then broadcast to all processors */
  if (rank == 0) {
    PetscCall(PetscMemzero(title,PETSC_MAX_PATH_LEN+1));
    PetscStackCallStandard(ex_get_init,exoid, title, &dimEmbed, &numVertices, &numCells, &num_cs, &num_vs, &num_fs);
    PetscCheck(num_cs,PETSC_COMM_SELF,PETSC_ERR_SUP,"Exodus file does not contain any cell set");
  }
  PetscCallMPI(MPI_Bcast(title, PETSC_MAX_PATH_LEN+1, MPI_CHAR, 0, comm));
  /* NOTE(review): dim is still 0 on every rank here; the meaningful topological dimension is
     computed below from the cell types and broadcast again with dimEmbed — confirm this early
     broadcast is intentional */
  PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
  PetscCall(PetscObjectSetName((PetscObject) *dm, title));
  /* Only rank 0 has nonzero counts, so the chart is empty on the other ranks */
  PetscCall(DMPlexSetChart(*dm, 0, numCells+numVertices));
  /* We do not want this label automatically computed, instead we compute it here */
  PetscCall(DMCreateLabel(*dm, "celltype"));

  /* Read cell sets information */
  if (rank == 0) {
    PetscInt *cone;
    int c, cs, ncs, c_loc, v, v_loc;
    /* Read from ex_get_elem_blk_ids() */
    int *cs_id, *cs_order;
    /* Read from ex_get_elem_block() */
    char buffer[PETSC_MAX_PATH_LEN+1];
    int num_cell_in_set, num_vertex_per_cell, num_hybrid, num_attr;
    /* Read from ex_get_elem_conn() */
    int *cs_connect;

    /* Get cell sets IDs */
    PetscCall(PetscMalloc2(num_cs, &cs_id, num_cs, &cs_order));
    PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, cs_id);
    /* Read the cell set connectivity table and build mesh topology
       EXO standard requires that cells in cell sets be numbered sequentially and be pairwise disjoint. */
    /* Check for a hybrid mesh: reorder blocks so that non-prism blocks are numbered before prism blocks */
    for (cs = 0, num_hybrid = 0; cs < num_cs; ++cs) {
      DMPolytopeType ct;
      char elem_type[PETSC_MAX_PATH_LEN];

      PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
      PetscStackCallStandard(ex_get_elem_type,exoid, cs_id[cs], elem_type);
      PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
      /* The topological dimension of the mesh is the largest dimension over all blocks */
      dim = PetscMax(dim, DMPolytopeTypeGetDim(ct));
      PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr);
      switch (ct) {
      case DM_POLYTOPE_TRI_PRISM:
        cs_order[cs] = cs;
        ++num_hybrid;
        break;
      default:
        /* Shift the already-placed hybrid blocks up by one and insert this block before them */
        for (c = cs; c > cs-num_hybrid; --c) cs_order[c] = cs_order[c-1];
        cs_order[cs-num_hybrid] = cs;
      }
    }
    /* First set sizes */
    for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
      DMPolytopeType ct;
      char elem_type[PETSC_MAX_PATH_LEN];
      const PetscInt cs = cs_order[ncs];

      PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
      PetscStackCallStandard(ex_get_elem_type,exoid, cs_id[cs], elem_type);
      PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
      PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr);
      for (c_loc = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
        PetscCall(DMPlexSetConeSize(*dm, c, num_vertex_per_cell));
        PetscCall(DMPlexSetCellType(*dm, c, ct));
      }
    }
    /* Vertices are numbered after all the cells */
    for (v = numCells; v < numCells+numVertices; ++v) PetscCall(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
    PetscCall(DMSetUp(*dm));
    for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
      const PetscInt cs = cs_order[ncs];
      PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
      PetscCall(PetscMalloc2(num_vertex_per_cell*num_cell_in_set,&cs_connect,num_vertex_per_cell,&cone));
      PetscStackCallStandard(ex_get_conn,exoid, EX_ELEM_BLOCK, cs_id[cs], cs_connect,NULL,NULL);
      /* EXO uses Fortran-based indexing, DMPlex uses C-style and numbers cell first then vertices. */
      for (c_loc = 0, v = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
        DMPolytopeType ct;

        for (v_loc = 0; v_loc < num_vertex_per_cell; ++v_loc, ++v) {
          cone[v_loc] = cs_connect[v]+numCells-1;
        }
        PetscCall(DMPlexGetCellType(*dm, c, &ct));
        /* Convert from the exodus vertex ordering to the DMPlex ordering for this cell shape */
        PetscCall(DMPlexInvertCell(ct, cone));
        PetscCall(DMPlexSetCone(*dm, c, cone));
        PetscCall(DMSetLabelValue_Fast(*dm, &cellSets, "Cell Sets", c, cs_id[cs]));
      }
      PetscCall(PetscFree2(cs_connect,cone));
    }
    PetscCall(PetscFree2(cs_id, cs_order));
  }
  {
    /* Broadcast the dimensions computed on rank 0 so all ranks agree */
    PetscInt ints[] = {dim, dimEmbed};

    PetscCallMPI(MPI_Bcast(ints, 2, MPIU_INT, 0, comm));
    PetscCall(DMSetDimension(*dm, ints[0]));
    PetscCall(DMSetCoordinateDim(*dm, ints[1]));
    dim      = ints[0];
    dimEmbed = ints[1];
  }
  PetscCall(DMPlexSymmetrize(*dm));
  PetscCall(DMPlexStratify(*dm));
  if (interpolate) {
    DM idm;

    PetscCall(DMPlexInterpolate(*dm, &idm));
    PetscCall(DMDestroy(dm));
    *dm = idm;
  }

  /* Create vertex set label */
  if (rank == 0 && (num_vs > 0)) {
    int vs, v;
    /* Read from ex_get_node_set_ids() */
    int *vs_id;
    /* Read from ex_get_node_set_param() */
    int num_vertex_in_set;
    /* Read from ex_get_node_set() */
    int *vs_vertex_list;

    /* Get vertex set ids */
    PetscCall(PetscMalloc1(num_vs, &vs_id));
    PetscStackCallStandard(ex_get_ids,exoid, EX_NODE_SET, vs_id);
    for (vs = 0; vs < num_vs; ++vs) {
      PetscStackCallStandard(ex_get_set_param,exoid, EX_NODE_SET, vs_id[vs], &num_vertex_in_set, NULL);
      PetscCall(PetscMalloc1(num_vertex_in_set, &vs_vertex_list));
      PetscStackCallStandard(ex_get_set,exoid, EX_NODE_SET, vs_id[vs], vs_vertex_list, NULL);
      for (v = 0; v < num_vertex_in_set; ++v) {
        /* +numCells-1 converts a 1-based exodus node number to a DMPlex vertex point */
        PetscCall(DMSetLabelValue_Fast(*dm, &vertSets, "Vertex Sets", vs_vertex_list[v]+numCells-1, vs_id[vs]));
      }
      PetscCall(PetscFree(vs_vertex_list));
    }
    PetscCall(PetscFree(vs_id));
  }
  /* Read coordinates */
  PetscCall(DMGetCoordinateSection(*dm, &coordSection));
  PetscCall(PetscSectionSetNumFields(coordSection, 1));
  PetscCall(PetscSectionSetFieldComponents(coordSection, 0, dimEmbed));
  PetscCall(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
  for (v = numCells; v < numCells+numVertices; ++v) {
    PetscCall(PetscSectionSetDof(coordSection, v, dimEmbed));
    PetscCall(PetscSectionSetFieldDof(coordSection, v, 0, dimEmbed));
  }
  PetscCall(PetscSectionSetUp(coordSection));
  PetscCall(PetscSectionGetStorageSize(coordSection, &coordSize));
  PetscCall(VecCreate(PETSC_COMM_SELF, &coordinates));
  PetscCall(PetscObjectSetName((PetscObject) coordinates, "coordinates"));
  PetscCall(VecSetSizes(coordinates, coordSize, PETSC_DETERMINE));
  PetscCall(VecSetBlockSize(coordinates, dimEmbed));
  PetscCall(VecSetType(coordinates,VECSTANDARD));
  PetscCall(VecGetArray(coordinates, &coords));
  if (rank == 0) {
    PetscReal *x, *y, *z;

    /* exodus stores coordinates as separate x/y/z arrays; interleave them into the PETSc Vec */
    PetscCall(PetscMalloc3(numVertices,&x,numVertices,&y,numVertices,&z));
    PetscStackCallStandard(ex_get_coord,exoid, x, y, z);
    if (dimEmbed > 0) {
      for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+0] = x[v];
    }
    if (dimEmbed > 1) {
      for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+1] = y[v];
    }
    if (dimEmbed > 2) {
      for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+2] = z[v];
    }
    PetscCall(PetscFree3(x,y,z));
  }
  PetscCall(VecRestoreArray(coordinates, &coords));
  PetscCall(DMSetCoordinatesLocal(*dm, coordinates));
  PetscCall(VecDestroy(&coordinates));

  /* Create side set label */
  if (rank == 0 && interpolate && (num_fs > 0)) {
    int fs, f, voff;
    /* Read from ex_get_side_set_ids() */
    int *fs_id;
    /* Read from ex_get_side_set_param() */
    int num_side_in_set;
    /* Read from ex_get_side_set_node_list() */
    int *fs_vertex_count_list, *fs_vertex_list;
    /* Read side set labels */
    char fs_name[MAX_STR_LENGTH+1];

    /* Get side set ids */
    PetscCall(PetscMalloc1(num_fs, &fs_id));
    PetscStackCallStandard(ex_get_ids,exoid, EX_SIDE_SET, fs_id);
    for (fs = 0; fs < num_fs; ++fs) {
      PetscStackCallStandard(ex_get_set_param,exoid, EX_SIDE_SET, fs_id[fs], &num_side_in_set, NULL);
      PetscCall(PetscMalloc2(num_side_in_set,&fs_vertex_count_list,num_side_in_set*4,&fs_vertex_list));
      PetscStackCallStandard(ex_get_side_set_node_list,exoid, fs_id[fs], fs_vertex_count_list, fs_vertex_list);
      /* Get the specific name associated with this side set ID. */
      int fs_name_err = ex_get_name(exoid, EX_SIDE_SET, fs_id[fs], fs_name);
      for (f = 0, voff = 0; f < num_side_in_set; ++f) {
        const PetscInt *faces = NULL;
        PetscInt faceSize = fs_vertex_count_list[f], numFaces;
        PetscInt faceVertices[4], v;

        PetscCheck(faceSize <= 4,comm, PETSC_ERR_ARG_WRONG, "ExodusII side cannot have %d > 4 vertices", faceSize);
        for (v = 0; v < faceSize; ++v, ++voff) {
          faceVertices[v] = fs_vertex_list[voff]+numCells-1;
        }
        /* Identify the unique mesh face spanned by these vertices */
        PetscCall(DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
        PetscCheck(numFaces == 1,comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %d faces", f, fs, numFaces);
        PetscCall(DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs]));
        /* Only add the label if one has been detected for this side set. */
        if (!fs_name_err) {
          PetscCall(DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs]));
        }
        PetscCall(DMPlexRestoreJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
      }
      PetscCall(PetscFree2(fs_vertex_count_list,fs_vertex_list));
    }
    PetscCall(PetscFree(fs_id));
  }

  { /* Create Cell/Face/Vertex Sets labels at all processes */
    enum {n = 3};
    PetscBool flag[n];

    flag[0] = cellSets ? PETSC_TRUE : PETSC_FALSE;
    flag[1] = faceSets ? PETSC_TRUE : PETSC_FALSE;
    flag[2] = vertSets ? PETSC_TRUE : PETSC_FALSE;
    PetscCallMPI(MPI_Bcast(flag, n, MPIU_BOOL, 0, comm));
    if (flag[0]) PetscCall(DMCreateLabel(*dm, "Cell Sets"));
    if (flag[1]) PetscCall(DMCreateLabel(*dm, "Face Sets"));
    if (flag[2]) PetscCall(DMCreateLabel(*dm, "Vertex Sets"));
  }
  PetscFunctionReturn(0);
#else
  SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
#endif
}