1 #define PETSCDM_DLL 2 #include <petsc/private/dmpleximpl.h> /*I "petscdmplex.h" I*/ 3 4 #if defined(PETSC_HAVE_EXODUSII) 5 #include <netcdf.h> 6 #include <exodusII.h> 7 #endif 8 9 #include <petsc/private/viewerimpl.h> 10 #include <petsc/private/viewerexodusiiimpl.h> 11 #if defined(PETSC_HAVE_EXODUSII) 12 /* 13 PETSC_VIEWER_EXODUSII_ - Creates an ExodusII PetscViewer shared by all processors in a communicator. 14 15 Collective 16 17 Input Parameter: 18 . comm - the MPI communicator to share the ExodusII PetscViewer 19 20 Level: intermediate 21 22 Notes: 23 misses Fortran bindings 24 25 Notes: 26 Unlike almost all other PETSc routines, PETSC_VIEWER_EXODUSII_ does not return 27 an error code. The GLVIS PetscViewer is usually used in the form 28 $ XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm)); 29 30 .seealso: `PetscViewerExodusIIOpen()`, `PetscViewerType`, `PetscViewerCreate()`, `PetscViewerDestroy()` 31 */ 32 PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm) 33 { 34 PetscViewer viewer; 35 PetscErrorCode ierr; 36 37 PetscFunctionBegin; 38 ierr = PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer); 39 if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);} 40 ierr = PetscObjectRegisterDestroy((PetscObject) viewer); 41 if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);} 42 PetscFunctionReturn(viewer); 43 } 44 45 static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer) 46 { 47 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) v->data; 48 49 PetscFunctionBegin; 50 if (exo->filename) PetscCall(PetscViewerASCIIPrintf(viewer, "Filename: %s\n", exo->filename)); 51 if (exo->exoid) PetscCall(PetscViewerASCIIPrintf(viewer, "exoid: %d\n", exo->exoid)); 52 if (exo->btype) PetscCall(PetscViewerASCIIPrintf(viewer, "IO Mode: %d\n", exo->btype)); 53 if 
(exo->order) PetscCall(PetscViewerASCIIPrintf(viewer, "Mesh order: %d\n", exo->order)); 54 PetscFunctionReturn(0); 55 } 56 57 static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscOptionItems *PetscOptionsObject, PetscViewer v) 58 { 59 PetscFunctionBegin; 60 PetscOptionsHeadBegin(PetscOptionsObject, "ExodusII PetscViewer Options"); 61 PetscOptionsHeadEnd(); 62 PetscFunctionReturn(0); 63 } 64 65 static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer) 66 { 67 PetscFunctionBegin; 68 PetscFunctionReturn(0); 69 } 70 71 static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer) 72 { 73 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 74 75 PetscFunctionBegin; 76 if (exo->exoid >= 0) {PetscStackCallStandard(ex_close,exo->exoid);} 77 PetscCall(PetscFree(exo->filename)); 78 PetscCall(PetscFree(exo)); 79 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetName_C",NULL)); 80 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileGetName_C",NULL)); 81 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetMode_C",NULL)); 82 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetId_C",NULL)); 83 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetOrder_C",NULL)); 84 PetscCall(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerSetOrder_C",NULL)); 85 PetscFunctionReturn(0); 86 } 87 88 static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[]) 89 { 90 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 91 PetscMPIInt rank; 92 int CPU_word_size, IO_word_size, EXO_mode; 93 MPI_Info mpi_info = MPI_INFO_NULL; 94 float EXO_version; 95 96 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank)); 97 CPU_word_size = sizeof(PetscReal); 98 IO_word_size = sizeof(PetscReal); 99 100 PetscFunctionBegin; 101 if (exo->exoid >= 0) { 102 
PetscStackCallStandard(ex_close,exo->exoid); 103 exo->exoid = -1; 104 } 105 if (exo->filename) PetscCall(PetscFree(exo->filename)); 106 PetscCall(PetscStrallocpy(name, &exo->filename)); 107 switch (exo->btype) { 108 case FILE_MODE_READ: 109 EXO_mode = EX_READ; 110 break; 111 case FILE_MODE_APPEND: 112 case FILE_MODE_UPDATE: 113 case FILE_MODE_APPEND_UPDATE: 114 /* Will fail if the file does not already exist */ 115 EXO_mode = EX_WRITE; 116 break; 117 case FILE_MODE_WRITE: 118 /* 119 exodus only allows writing geometry upon file creation, so we will let DMView create the file. 120 */ 121 PetscFunctionReturn(0); 122 break; 123 default: 124 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()"); 125 } 126 #if defined(PETSC_USE_64BIT_INDICES) 127 EXO_mode += EX_ALL_INT64_API; 128 #endif 129 exo->exoid = ex_open_par(name,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,PETSC_COMM_WORLD,mpi_info); 130 PetscCheck(exo->exoid >= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", name); 131 PetscFunctionReturn(0); 132 } 133 134 static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name) 135 { 136 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 137 138 PetscFunctionBegin; 139 *name = exo->filename; 140 PetscFunctionReturn(0); 141 } 142 143 static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type) 144 { 145 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 146 147 PetscFunctionBegin; 148 exo->btype = type; 149 PetscFunctionReturn(0); 150 } 151 152 static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type) 153 { 154 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 155 156 PetscFunctionBegin; 157 *type = exo->btype; 158 PetscFunctionReturn(0); 159 } 160 161 static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid) 162 { 163 
PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 164 165 PetscFunctionBegin; 166 *exoid = exo->exoid; 167 PetscFunctionReturn(0); 168 } 169 170 static PetscErrorCode PetscViewerExodusIIGetOrder_ExodusII(PetscViewer viewer, PetscInt *order) 171 { 172 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 173 174 PetscFunctionBegin; 175 *order = exo->order; 176 PetscFunctionReturn(0); 177 } 178 179 static PetscErrorCode PetscViewerExodusIISetOrder_ExodusII(PetscViewer viewer, PetscInt order) 180 { 181 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 182 183 PetscFunctionBegin; 184 exo->order = order; 185 PetscFunctionReturn(0); 186 } 187 188 /*MC 189 PETSCVIEWEREXODUSII - A viewer that writes to an Exodus II file 190 191 .seealso: `PetscViewerExodusIIOpen()`, `PetscViewerCreate()`, `PETSCVIEWERBINARY`, `PETSCVIEWERHDF5`, `DMView()`, 192 `PetscViewerFileSetName()`, `PetscViewerFileSetMode()`, `PetscViewerFormat`, `PetscViewerType`, `PetscViewerSetType()` 193 194 Level: beginner 195 M*/ 196 197 PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v) 198 { 199 PetscViewer_ExodusII *exo; 200 201 PetscFunctionBegin; 202 PetscCall(PetscNewLog(v,&exo)); 203 204 v->data = (void*) exo; 205 v->ops->destroy = PetscViewerDestroy_ExodusII; 206 v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII; 207 v->ops->setup = PetscViewerSetUp_ExodusII; 208 v->ops->view = PetscViewerView_ExodusII; 209 v->ops->flush = 0; 210 exo->btype = (PetscFileMode) -1; 211 exo->filename = 0; 212 exo->exoid = -1; 213 214 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetName_C",PetscViewerFileSetName_ExodusII)); 215 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetName_C",PetscViewerFileGetName_ExodusII)); 216 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetMode_C",PetscViewerFileSetMode_ExodusII)); 217 
PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetMode_C",PetscViewerFileGetMode_ExodusII)); 218 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetId_C",PetscViewerExodusIIGetId_ExodusII)); 219 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerSetOrder_C",PetscViewerExodusIISetOrder_ExodusII)); 220 PetscCall(PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetOrder_C",PetscViewerExodusIIGetOrder_ExodusII)); 221 PetscFunctionReturn(0); 222 } 223 224 /* 225 EXOGetVarIndex - Locate a result in an exodus file based on its name 226 227 Collective 228 229 Input Parameters: 230 + exoid - the exodus id of a file (obtained from ex_open or ex_create for instance) 231 . obj_type - the type of entity for instance EX_NODAL, EX_ELEM_BLOCK 232 - name - the name of the result 233 234 Output Parameters: 235 . varIndex - the location in the exodus file of the result 236 237 Notes: 238 The exodus variable index is obtained by comparing name and the 239 names of zonal variables declared in the exodus file. For instance if name is "V" 240 the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11" 241 amongst all variables of type obj_type. 
242 243 Level: beginner 244 245 .seealso: `EXOGetVarIndex(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadNodal_PlexEXO(),VecLoadZonal_PlexEXO()` 246 */ 247 PetscErrorCode EXOGetVarIndex_Internal(int exoid, ex_entity_type obj_type, const char name[], int *varIndex) 248 { 249 int num_vars, i, j; 250 char ext_name[MAX_STR_LENGTH+1], var_name[MAX_STR_LENGTH+1]; 251 const int num_suffix = 5; 252 char *suffix[5]; 253 PetscBool flg; 254 255 PetscFunctionBegin; 256 suffix[0] = (char *) ""; 257 suffix[1] = (char *) "_X"; 258 suffix[2] = (char *) "_XX"; 259 suffix[3] = (char *) "_1"; 260 suffix[4] = (char *) "_11"; 261 262 *varIndex = -1; 263 PetscStackCallStandard(ex_get_variable_param,exoid, obj_type, &num_vars); 264 for (i = 0; i < num_vars; ++i) { 265 PetscStackCallStandard(ex_get_variable_name,exoid, obj_type, i+1, var_name); 266 for (j = 0; j < num_suffix; ++j) { 267 PetscCall(PetscStrncpy(ext_name, name, MAX_STR_LENGTH)); 268 PetscCall(PetscStrlcat(ext_name, suffix[j], MAX_STR_LENGTH)); 269 PetscCall(PetscStrcasecmp(ext_name, var_name, &flg)); 270 if (flg) { 271 *varIndex = i+1; 272 } 273 } 274 } 275 PetscFunctionReturn(0); 276 } 277 278 /* 279 DMView_PlexExodusII - Write a DM to disk in exodus format 280 281 Collective on dm 282 283 Input Parameters: 284 + dm - The dm to be written 285 . viewer - an exodusII viewer 286 287 Notes: 288 Not all DM can be written to disk this way. For instance, exodus assume that element blocks (mapped to "Cell sets" labels) 289 consists of sequentially numbered cells. If this is not the case, the exodus file will be corrupted. 290 291 If the dm has been distributed, only the part of the DM on MPI rank 0 (including "ghost" cells and vertices) 292 will be written. 293 294 DMPlex only represents geometry while most post-processing software expect that a mesh also provides information 295 on the discretization space. This function assumes that the file represents Lagrange finite elements of order 1 or 2. 
296 The order of the mesh shall be set using PetscViewerExodusIISetOrder 297 It should be extended to use PetscFE objects. 298 299 This function will only handle TRI, TET, QUAD, and HEX cells. 300 Level: beginner 301 302 .seealso: 303 */ 304 PetscErrorCode DMView_PlexExodusII(DM dm, PetscViewer viewer) 305 { 306 enum ElemType {TRI, QUAD, TET, HEX}; 307 MPI_Comm comm; 308 PetscInt degree; /* the order of the mesh */ 309 /* Connectivity Variables */ 310 PetscInt cellsNotInConnectivity; 311 /* Cell Sets */ 312 DMLabel csLabel; 313 IS csIS; 314 const PetscInt *csIdx; 315 PetscInt num_cs, cs; 316 enum ElemType *type; 317 PetscBool hasLabel; 318 /* Coordinate Variables */ 319 DM cdm; 320 PetscSection coordSection; 321 Vec coord; 322 PetscInt **nodes; 323 PetscInt depth, d, dim, skipCells = 0; 324 PetscInt pStart, pEnd, p, cStart, cEnd, numCells, vStart, vEnd, numVertices, eStart, eEnd, numEdges, fStart, fEnd, numFaces, numNodes; 325 PetscInt num_vs, num_fs; 326 PetscMPIInt rank, size; 327 const char *dmName; 328 PetscInt nodesTriP1[4] = {3,0,0,0}; 329 PetscInt nodesTriP2[4] = {3,3,0,0}; 330 PetscInt nodesQuadP1[4] = {4,0,0,0}; 331 PetscInt nodesQuadP2[4] = {4,4,0,1}; 332 PetscInt nodesTetP1[4] = {4,0,0,0}; 333 PetscInt nodesTetP2[4] = {4,6,0,0}; 334 PetscInt nodesHexP1[4] = {8,0,0,0}; 335 PetscInt nodesHexP2[4] = {8,12,6,1}; 336 int CPU_word_size, IO_word_size, EXO_mode; 337 float EXO_version; 338 339 PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data; 340 341 PetscFunctionBegin; 342 PetscCall(PetscObjectGetComm((PetscObject)dm, &comm)); 343 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 344 PetscCallMPI(MPI_Comm_size(comm, &size)); 345 346 /* 347 Creating coordSection is a collective operation so we do it somewhat out of sequence 348 */ 349 PetscCall(PetscSectionCreate(comm, &coordSection)); 350 PetscCall(DMGetCoordinatesLocalSetUp(dm)); 351 if (!rank) { 352 switch (exo->btype) { 353 case FILE_MODE_READ: 354 case FILE_MODE_APPEND: 355 case FILE_MODE_UPDATE: 
356 case FILE_MODE_APPEND_UPDATE: 357 /* exodusII does not allow writing geometry to an existing file */ 358 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "cannot add geometry to existing file %s", exo->filename); 359 case FILE_MODE_WRITE: 360 /* Create an empty file if one already exists*/ 361 EXO_mode = EX_CLOBBER; 362 #if defined(PETSC_USE_64BIT_INDICES) 363 EXO_mode += EX_ALL_INT64_API; 364 #endif 365 CPU_word_size = sizeof(PetscReal); 366 IO_word_size = sizeof(PetscReal); 367 exo->exoid = ex_create(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size); 368 PetscCheck(exo->exoid >= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", exo->filename); 369 370 break; 371 default: 372 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()"); 373 } 374 375 /* --- Get DM info --- */ 376 PetscCall(PetscObjectGetName((PetscObject) dm, &dmName)); 377 PetscCall(DMPlexGetDepth(dm, &depth)); 378 PetscCall(DMGetDimension(dm, &dim)); 379 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); 380 PetscCall(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd)); 381 PetscCall(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd)); 382 PetscCall(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd)); 383 PetscCall(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd)); 384 numCells = cEnd - cStart; 385 numEdges = eEnd - eStart; 386 numVertices = vEnd - vStart; 387 if (depth == 3) {numFaces = fEnd - fStart;} 388 else {numFaces = 0;} 389 PetscCall(DMGetLabelSize(dm, "Cell Sets", &num_cs)); 390 PetscCall(DMGetLabelSize(dm, "Vertex Sets", &num_vs)); 391 PetscCall(DMGetLabelSize(dm, "Face Sets", &num_fs)); 392 PetscCall(DMGetCoordinatesLocal(dm, &coord)); 393 PetscCall(DMGetCoordinateDM(dm, &cdm)); 394 if (num_cs > 0) { 395 PetscCall(DMGetLabel(dm, "Cell Sets", &csLabel)); 396 PetscCall(DMLabelGetValueIS(csLabel, &csIS)); 397 PetscCall(ISGetIndices(csIS, &csIdx)); 398 } 399 PetscCall(PetscMalloc1(num_cs, &nodes)); 400 /* Set element type for each block and compute 
total number of nodes */ 401 PetscCall(PetscMalloc1(num_cs, &type)); 402 numNodes = numVertices; 403 404 PetscCall(PetscViewerExodusIIGetOrder(viewer, °ree)); 405 if (degree == 2) {numNodes += numEdges;} 406 cellsNotInConnectivity = numCells; 407 for (cs = 0; cs < num_cs; ++cs) { 408 IS stratumIS; 409 const PetscInt *cells; 410 PetscScalar *xyz = NULL; 411 PetscInt csSize, closureSize; 412 413 PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS)); 414 PetscCall(ISGetIndices(stratumIS, &cells)); 415 PetscCall(ISGetSize(stratumIS, &csSize)); 416 PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz)); 417 switch (dim) { 418 case 2: 419 if (closureSize == 3*dim) {type[cs] = TRI;} 420 else if (closureSize == 4*dim) {type[cs] = QUAD;} 421 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize/dim, dim); 422 break; 423 case 3: 424 if (closureSize == 4*dim) {type[cs] = TET;} 425 else if (closureSize == 8*dim) {type[cs] = HEX;} 426 else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %" PetscInt_FMT " in dimension %" PetscInt_FMT " has no ExodusII type", closureSize/dim, dim); 427 break; 428 default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %" PetscInt_FMT " not handled by ExodusII viewer", dim); 429 } 430 if ((degree == 2) && (type[cs] == QUAD)) {numNodes += csSize;} 431 if ((degree == 2) && (type[cs] == HEX)) {numNodes += csSize; numNodes += numFaces;} 432 PetscCall(DMPlexVecRestoreClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz)); 433 /* Set nodes and Element type */ 434 if (type[cs] == TRI) { 435 if (degree == 1) nodes[cs] = nodesTriP1; 436 else if (degree == 2) nodes[cs] = nodesTriP2; 437 } else if (type[cs] == QUAD) { 438 if (degree == 1) nodes[cs] = nodesQuadP1; 439 else if (degree == 2) nodes[cs] = nodesQuadP2; 440 } else if (type[cs] == TET) { 441 if (degree == 1) nodes[cs] = 
nodesTetP1; 442 else if (degree == 2) nodes[cs] = nodesTetP2; 443 } else if (type[cs] == HEX) { 444 if (degree == 1) nodes[cs] = nodesHexP1; 445 else if (degree == 2) nodes[cs] = nodesHexP2; 446 } 447 /* Compute the number of cells not in the connectivity table */ 448 cellsNotInConnectivity -= nodes[cs][3]*csSize; 449 450 PetscCall(ISRestoreIndices(stratumIS, &cells)); 451 PetscCall(ISDestroy(&stratumIS)); 452 } 453 if (num_cs > 0) {PetscStackCallStandard(ex_put_init,exo->exoid, dmName, dim, numNodes, numCells, num_cs, num_vs, num_fs);} 454 /* --- Connectivity --- */ 455 for (cs = 0; cs < num_cs; ++cs) { 456 IS stratumIS; 457 const PetscInt *cells; 458 PetscInt *connect, off = 0; 459 PetscInt edgesInClosure = 0, facesInClosure = 0, verticesInClosure = 0; 460 PetscInt csSize, c, connectSize, closureSize; 461 char *elem_type = NULL; 462 char elem_type_tri3[] = "TRI3", elem_type_quad4[] = "QUAD4"; 463 char elem_type_tri6[] = "TRI6", elem_type_quad9[] = "QUAD9"; 464 char elem_type_tet4[] = "TET4", elem_type_hex8[] = "HEX8"; 465 char elem_type_tet10[] = "TET10", elem_type_hex27[] = "HEX27"; 466 467 PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS)); 468 PetscCall(ISGetIndices(stratumIS, &cells)); 469 PetscCall(ISGetSize(stratumIS, &csSize)); 470 /* Set Element type */ 471 if (type[cs] == TRI) { 472 if (degree == 1) elem_type = elem_type_tri3; 473 else if (degree == 2) elem_type = elem_type_tri6; 474 } else if (type[cs] == QUAD) { 475 if (degree == 1) elem_type = elem_type_quad4; 476 else if (degree == 2) elem_type = elem_type_quad9; 477 } else if (type[cs] == TET) { 478 if (degree == 1) elem_type = elem_type_tet4; 479 else if (degree == 2) elem_type = elem_type_tet10; 480 } else if (type[cs] == HEX) { 481 if (degree == 1) elem_type = elem_type_hex8; 482 else if (degree == 2) elem_type = elem_type_hex27; 483 } 484 connectSize = nodes[cs][0] + nodes[cs][1] + nodes[cs][2] + nodes[cs][3]; 485 PetscCall(PetscMalloc1(PetscMax(27,connectSize)*csSize, &connect)); 
486 PetscStackCallStandard(ex_put_block,exo->exoid, EX_ELEM_BLOCK, csIdx[cs], elem_type, csSize, connectSize, 0, 0, 1); 487 /* Find number of vertices, edges, and faces in the closure */ 488 verticesInClosure = nodes[cs][0]; 489 if (depth > 1) { 490 if (dim == 2) { 491 PetscCall(DMPlexGetConeSize(dm, cells[0], &edgesInClosure)); 492 } else if (dim == 3) { 493 PetscInt *closure = NULL; 494 495 PetscCall(DMPlexGetConeSize(dm, cells[0], &facesInClosure)); 496 PetscCall(DMPlexGetTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure)); 497 edgesInClosure = closureSize - facesInClosure - 1 - verticesInClosure; 498 PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure)); 499 } 500 } 501 /* Get connectivity for each cell */ 502 for (c = 0; c < csSize; ++c) { 503 PetscInt *closure = NULL; 504 PetscInt temp, i; 505 506 PetscCall(DMPlexGetTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure)); 507 for (i = 0; i < connectSize; ++i) { 508 if (i < nodes[cs][0]) {/* Vertices */ 509 connect[i+off] = closure[(i+edgesInClosure+facesInClosure+1)*2] + 1; 510 connect[i+off] -= cellsNotInConnectivity; 511 } else if (i < nodes[cs][0]+nodes[cs][1]) { /* Edges */ 512 connect[i+off] = closure[(i-verticesInClosure+facesInClosure+1)*2] + 1; 513 if (nodes[cs][2] == 0) connect[i+off] -= numFaces; 514 connect[i+off] -= cellsNotInConnectivity; 515 } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]) { /* Cells */ 516 connect[i+off] = closure[0] + 1; 517 connect[i+off] -= skipCells; 518 } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]+nodes[cs][2]) { /* Faces */ 519 connect[i+off] = closure[(i-edgesInClosure-verticesInClosure)*2] + 1; 520 connect[i+off] -= cellsNotInConnectivity; 521 } else { 522 connect[i+off] = -1; 523 } 524 } 525 /* Tetrahedra are inverted */ 526 if (type[cs] == TET) { 527 temp = connect[0+off]; connect[0+off] = connect[1+off]; connect[1+off] = temp; 528 if (degree == 2) { 529 temp = connect[5+off]; 
connect[5+off] = connect[6+off]; connect[6+off] = temp; 530 temp = connect[7+off]; connect[7+off] = connect[8+off]; connect[8+off] = temp; 531 } 532 } 533 /* Hexahedra are inverted */ 534 if (type[cs] == HEX) { 535 temp = connect[1+off]; connect[1+off] = connect[3+off]; connect[3+off] = temp; 536 if (degree == 2) { 537 temp = connect[8+off]; connect[8+off] = connect[11+off]; connect[11+off] = temp; 538 temp = connect[9+off]; connect[9+off] = connect[10+off]; connect[10+off] = temp; 539 temp = connect[16+off]; connect[16+off] = connect[17+off]; connect[17+off] = temp; 540 temp = connect[18+off]; connect[18+off] = connect[19+off]; connect[19+off] = temp; 541 542 temp = connect[12+off]; connect[12+off] = connect[16+off]; connect[16+off] = temp; 543 temp = connect[13+off]; connect[13+off] = connect[17+off]; connect[17+off] = temp; 544 temp = connect[14+off]; connect[14+off] = connect[18+off]; connect[18+off] = temp; 545 temp = connect[15+off]; connect[15+off] = connect[19+off]; connect[19+off] = temp; 546 547 temp = connect[23+off]; connect[23+off] = connect[26+off]; connect[26+off] = temp; 548 temp = connect[24+off]; connect[24+off] = connect[25+off]; connect[25+off] = temp; 549 temp = connect[25+off]; connect[25+off] = connect[26+off]; connect[26+off] = temp; 550 } 551 } 552 off += connectSize; 553 PetscCall(DMPlexRestoreTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure)); 554 } 555 PetscStackCallStandard(ex_put_conn,exo->exoid, EX_ELEM_BLOCK, csIdx[cs], connect, 0, 0); 556 skipCells += (nodes[cs][3] == 0)*csSize; 557 PetscCall(PetscFree(connect)); 558 PetscCall(ISRestoreIndices(stratumIS, &cells)); 559 PetscCall(ISDestroy(&stratumIS)); 560 } 561 PetscCall(PetscFree(type)); 562 /* --- Coordinates --- */ 563 PetscCall(PetscSectionSetChart(coordSection, pStart, pEnd)); 564 if (num_cs) { 565 for (d = 0; d < depth; ++d) { 566 PetscCall(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd)); 567 for (p = pStart; p < pEnd; ++p) { 568 
PetscCall(PetscSectionSetDof(coordSection, p, nodes[0][d] > 0)); 569 } 570 } 571 } 572 for (cs = 0; cs < num_cs; ++cs) { 573 IS stratumIS; 574 const PetscInt *cells; 575 PetscInt csSize, c; 576 577 PetscCall(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS)); 578 PetscCall(ISGetIndices(stratumIS, &cells)); 579 PetscCall(ISGetSize(stratumIS, &csSize)); 580 for (c = 0; c < csSize; ++c) { 581 PetscCall(PetscSectionSetDof(coordSection, cells[c], nodes[cs][3] > 0)); 582 } 583 PetscCall(ISRestoreIndices(stratumIS, &cells)); 584 PetscCall(ISDestroy(&stratumIS)); 585 } 586 if (num_cs > 0) { 587 PetscCall(ISRestoreIndices(csIS, &csIdx)); 588 PetscCall(ISDestroy(&csIS)); 589 } 590 PetscCall(PetscFree(nodes)); 591 PetscCall(PetscSectionSetUp(coordSection)); 592 if (numNodes > 0) { 593 const char *coordNames[3] = {"x", "y", "z"}; 594 PetscScalar *closure, *cval; 595 PetscReal *coords; 596 PetscInt hasDof, n = 0; 597 598 /* There can't be more than 24 values in the closure of a point for the coord coordSection */ 599 PetscCall(PetscCalloc3(numNodes*3, &coords, dim, &cval, 24, &closure)); 600 PetscCall(DMGetCoordinatesLocalNoncollective(dm, &coord)); 601 PetscCall(DMPlexGetChart(dm, &pStart, &pEnd)); 602 for (p = pStart; p < pEnd; ++p) { 603 PetscCall(PetscSectionGetDof(coordSection, p, &hasDof)); 604 if (hasDof) { 605 PetscInt closureSize = 24, j; 606 607 PetscCall(DMPlexVecGetClosure(cdm, NULL, coord, p, &closureSize, &closure)); 608 for (d = 0; d < dim; ++d) { 609 cval[d] = 0.0; 610 for (j = 0; j < closureSize/dim; j++) cval[d] += closure[j*dim+d]; 611 coords[d*numNodes+n] = PetscRealPart(cval[d]) * dim / closureSize; 612 } 613 ++n; 614 } 615 } 616 PetscStackCallStandard(ex_put_coord,exo->exoid, &coords[0*numNodes], &coords[1*numNodes], &coords[2*numNodes]); 617 PetscCall(PetscFree3(coords, cval, closure)); 618 PetscStackCallStandard(ex_put_coord_names,exo->exoid, (char **) coordNames); 619 } 620 621 /* --- Node Sets/Vertex Sets --- */ 622 DMHasLabel(dm, "Vertex Sets", 
&hasLabel); 623 if (hasLabel) { 624 PetscInt i, vs, vsSize; 625 const PetscInt *vsIdx, *vertices; 626 PetscInt *nodeList; 627 IS vsIS, stratumIS; 628 DMLabel vsLabel; 629 PetscCall(DMGetLabel(dm, "Vertex Sets", &vsLabel)); 630 PetscCall(DMLabelGetValueIS(vsLabel, &vsIS)); 631 PetscCall(ISGetIndices(vsIS, &vsIdx)); 632 for (vs=0; vs<num_vs; ++vs) { 633 PetscCall(DMLabelGetStratumIS(vsLabel, vsIdx[vs], &stratumIS)); 634 PetscCall(ISGetIndices(stratumIS, &vertices)); 635 PetscCall(ISGetSize(stratumIS, &vsSize)); 636 PetscCall(PetscMalloc1(vsSize, &nodeList)); 637 for (i=0; i<vsSize; ++i) { 638 nodeList[i] = vertices[i] - skipCells + 1; 639 } 640 PetscStackCallStandard(ex_put_set_param,exo->exoid, EX_NODE_SET, vsIdx[vs], vsSize, 0); 641 PetscStackCallStandard(ex_put_set,exo->exoid, EX_NODE_SET, vsIdx[vs], nodeList, NULL); 642 PetscCall(ISRestoreIndices(stratumIS, &vertices)); 643 PetscCall(ISDestroy(&stratumIS)); 644 PetscCall(PetscFree(nodeList)); 645 } 646 PetscCall(ISRestoreIndices(vsIS, &vsIdx)); 647 PetscCall(ISDestroy(&vsIS)); 648 } 649 /* --- Side Sets/Face Sets --- */ 650 PetscCall(DMHasLabel(dm, "Face Sets", &hasLabel)); 651 if (hasLabel) { 652 PetscInt i, j, fs, fsSize; 653 const PetscInt *fsIdx, *faces; 654 IS fsIS, stratumIS; 655 DMLabel fsLabel; 656 PetscInt numPoints, *points; 657 PetscInt elem_list_size = 0; 658 PetscInt *elem_list, *elem_ind, *side_list; 659 660 PetscCall(DMGetLabel(dm, "Face Sets", &fsLabel)); 661 /* Compute size of Node List and Element List */ 662 PetscCall(DMLabelGetValueIS(fsLabel, &fsIS)); 663 PetscCall(ISGetIndices(fsIS, &fsIdx)); 664 for (fs=0; fs<num_fs; ++fs) { 665 PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS)); 666 PetscCall(ISGetSize(stratumIS, &fsSize)); 667 elem_list_size += fsSize; 668 PetscCall(ISDestroy(&stratumIS)); 669 } 670 PetscCall(PetscMalloc3(num_fs, &elem_ind,elem_list_size, &elem_list,elem_list_size, &side_list)); 671 elem_ind[0] = 0; 672 for (fs=0; fs<num_fs; ++fs) { 673 
PetscCall(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS)); 674 PetscCall(ISGetIndices(stratumIS, &faces)); 675 PetscCall(ISGetSize(stratumIS, &fsSize)); 676 /* Set Parameters */ 677 PetscStackCallStandard(ex_put_set_param,exo->exoid, EX_SIDE_SET, fsIdx[fs], fsSize, 0); 678 /* Indices */ 679 if (fs<num_fs-1) { 680 elem_ind[fs+1] = elem_ind[fs] + fsSize; 681 } 682 683 for (i=0; i<fsSize; ++i) { 684 /* Element List */ 685 points = NULL; 686 PetscCall(DMPlexGetTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points)); 687 elem_list[elem_ind[fs] + i] = points[2] +1; 688 PetscCall(DMPlexRestoreTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points)); 689 690 /* Side List */ 691 points = NULL; 692 PetscCall(DMPlexGetTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points)); 693 for (j=1; j<numPoints; ++j) { 694 if (points[j*2]==faces[i]) {break;} 695 } 696 /* Convert HEX sides */ 697 if (numPoints == 27) { 698 if (j == 1) {j = 5;} 699 else if (j == 2) {j = 6;} 700 else if (j == 3) {j = 1;} 701 else if (j == 4) {j = 3;} 702 else if (j == 5) {j = 2;} 703 else if (j == 6) {j = 4;} 704 } 705 /* Convert TET sides */ 706 if (numPoints == 15) { 707 --j; 708 if (j == 0) {j = 4;} 709 } 710 side_list[elem_ind[fs] + i] = j; 711 PetscCall(DMPlexRestoreTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points)); 712 713 } 714 PetscCall(ISRestoreIndices(stratumIS, &faces)); 715 PetscCall(ISDestroy(&stratumIS)); 716 } 717 PetscCall(ISRestoreIndices(fsIS, &fsIdx)); 718 PetscCall(ISDestroy(&fsIS)); 719 720 /* Put side sets */ 721 for (fs=0; fs<num_fs; ++fs) { 722 PetscStackCallStandard(ex_put_set,exo->exoid, EX_SIDE_SET, fsIdx[fs], &elem_list[elem_ind[fs]], &side_list[elem_ind[fs]]); 723 } 724 PetscCall(PetscFree3(elem_ind,elem_list,side_list)); 725 } 726 /* 727 close the exodus file 728 */ 729 ex_close(exo->exoid); 730 exo->exoid = -1; 731 } 732 PetscCall(PetscSectionDestroy(&coordSection)); 733 734 /* 735 
reopen the file in parallel 736 */ 737 EXO_mode = EX_WRITE; 738 #if defined(PETSC_USE_64BIT_INDICES) 739 EXO_mode += EX_ALL_INT64_API; 740 #endif 741 CPU_word_size = sizeof(PetscReal); 742 IO_word_size = sizeof(PetscReal); 743 exo->exoid = ex_open_par(exo->filename,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,comm,MPI_INFO_NULL); 744 PetscCheck(exo->exoid >= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", exo->filename); 745 PetscFunctionReturn(0); 746 } 747 748 /* 749 VecView_PlexExodusII_Internal - Write a Vec corresponding in an exodus file 750 751 Collective on v 752 753 Input Parameters: 754 + v - The vector to be written 755 - viewer - The PetscViewerExodusII viewer associate to an exodus file 756 757 Notes: 758 The exodus result variable index is obtained by comparing the Vec name and the 759 names of variables declared in the exodus file. For instance for a Vec named "V" 760 the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11" 761 amongst all variables. 
762 In the event where a nodal and zonal variable both match, the function will return an error instead of 763 possibly corrupting the file 764 765 Level: beginner 766 767 .seealso: `EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII()` 768 @*/ 769 PetscErrorCode VecView_PlexExodusII_Internal(Vec v, PetscViewer viewer) 770 { 771 DM dm; 772 MPI_Comm comm; 773 PetscMPIInt rank; 774 775 int exoid,offsetN = 0, offsetZ = 0; 776 const char *vecname; 777 PetscInt step; 778 779 PetscFunctionBegin; 780 PetscCall(PetscObjectGetComm((PetscObject) v, &comm)); 781 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 782 PetscCall(PetscViewerExodusIIGetId(viewer,&exoid)); 783 PetscCall(VecGetDM(v, &dm)); 784 PetscCall(PetscObjectGetName((PetscObject) v, &vecname)); 785 786 PetscCall(DMGetOutputSequenceNumber(dm,&step,NULL)); 787 PetscCall(EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN)); 788 PetscCall(EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ)); 789 PetscCheck(offsetN > 0 || offsetZ > 0,comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname); 790 if (offsetN > 0) { 791 PetscCall(VecViewPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN)); 792 } else if (offsetZ > 0) { 793 PetscCall(VecViewPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ)); 794 } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname); 795 PetscFunctionReturn(0); 796 } 797 798 /* 799 VecLoad_PlexExodusII_Internal - Write a Vec corresponding in an exodus file 800 801 Collective on v 802 803 Input Parameters: 804 + v - The vector to be written 805 - viewer - The PetscViewerExodusII viewer associate to an exodus file 806 807 Notes: 808 The exodus result variable index is obtained by comparing the Vec name and the 809 names of variables declared in the exodus file. 
For instance for a Vec named "V" 810 the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11" 811 amongst all variables. 812 In the event where a nodal and zonal variable both match, the function will return an error instead of 813 possibly corrupting the file 814 815 Level: beginner 816 817 .seealso: `EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII()` 818 @*/ 819 PetscErrorCode VecLoad_PlexExodusII_Internal(Vec v, PetscViewer viewer) 820 { 821 DM dm; 822 MPI_Comm comm; 823 PetscMPIInt rank; 824 825 int exoid,offsetN = 0, offsetZ = 0; 826 const char *vecname; 827 PetscInt step; 828 829 PetscFunctionBegin; 830 PetscCall(PetscObjectGetComm((PetscObject) v, &comm)); 831 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 832 PetscCall(PetscViewerExodusIIGetId(viewer,&exoid)); 833 PetscCall(VecGetDM(v, &dm)); 834 PetscCall(PetscObjectGetName((PetscObject) v, &vecname)); 835 836 PetscCall(DMGetOutputSequenceNumber(dm,&step,NULL)); 837 PetscCall(EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN)); 838 PetscCall(EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ)); 839 PetscCheck(offsetN > 0 || offsetZ > 0,comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname); 840 if (offsetN > 0) PetscCall(VecLoadPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN)); 841 else if (offsetZ > 0) PetscCall(VecLoadPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ)); 842 else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname); 843 PetscFunctionReturn(0); 844 } 845 846 /* 847 VecViewPlex_ExodusII_Nodal_Internal - Write a Vec corresponding to a nodal field to an exodus file 848 849 Collective on v 850 851 Input Parameters: 852 + v - The vector to be written 853 . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance) 854 . 
step - the time step to write at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result nodal variable index is obtained by comparing the Vec name and the
  names of nodal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all nodal variables.

  Level: beginner

.seealso: `EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecLoadNodal_PlexEXO(),VecViewZonal_PlexEXO(),VecLoadZonal_PlexEXO()`
@*/
PetscErrorCode VecViewPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm           comm;
  PetscMPIInt        size;
  DM                 dm;
  Vec                vNatural, vComp;
  const PetscScalar *varray;
  PetscInt           xs, xe, bs;
  PetscBool          useNatural;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) v, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v, &dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only matters in parallel; on one rank the global vector is used directly */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) {
    PetscCall(DMGetGlobalVector(dm, &vNatural));
    PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
    PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
  } else {
    vNatural = v;
  }

  /* Write local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially */
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs == 1) {
    /* Scalar field: one partial write of the locally owned range (exodus indices are 1-based) */
    PetscCall(VecGetArrayRead(vNatural, &varray));
    PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray);
    PetscCall(VecRestoreArrayRead(vNatural, &varray));
  } else {
    IS       compIS;
    PetscInt c;

    /* Vector field: extract each component c with a strided IS and write it as variable offset+c */
    PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
    for (c = 0; c < bs; ++c) {
      /* Reuse the same IS, shifting the first index to pick out component c */
      PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
      PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
      PetscCall(VecGetArrayRead(vComp, &varray));
      PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray);
      PetscCall(VecRestoreArrayRead(vComp, &varray));
      PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
    }
    PetscCall(ISDestroy(&compIS));
  }
  if (useNatural) PetscCall(DMRestoreGlobalVector(dm, &vNatural));
  PetscFunctionReturn(0);
}

/*
  VecLoadPlex_ExodusII_Nodal_Internal - Read a Vec corresponding to a nodal field from an exodus file

  Collective on v

  Input Parameters:
+ v      - The vector to be read
. exoid  - the exodus id of a file (obtained from ex_open or ex_create for instance)
. step   - the time step to read at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result nodal variable index is obtained by comparing the Vec name and the
  names of nodal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all nodal variables.

  Level: beginner

.seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecViewZonal_PlexEXO()`, `VecLoadZonal_PlexEXO()`
*/
PetscErrorCode VecLoadPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm     comm;
  PetscMPIInt  size;
  DM           dm;
  Vec          vNatural, vComp;
  PetscScalar *varray;
  PetscInt     xs, xe, bs;
  PetscBool    useNatural;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject) v, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v,&dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only matters in parallel; on one rank we read straight into v */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) PetscCall(DMGetGlobalVector(dm,&vNatural));
  else {vNatural = v;}

  /* Read local chunk from the file */
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs == 1) {
    /* Scalar field: one partial read of the locally owned range (exodus indices are 1-based) */
    PetscCall(VecGetArray(vNatural, &varray));
    PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray);
    PetscCall(VecRestoreArray(vNatural, &varray));
  } else {
    IS       compIS;
    PetscInt c;

    /* Vector field: read each exodus variable offset+c into component c through a strided IS */
    PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
    for (c = 0; c < bs; ++c) {
      PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
      PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
      PetscCall(VecGetArray(vComp, &varray));
      PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray);
      PetscCall(VecRestoreArray(vComp, &varray));
      PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
    }
    PetscCall(ISDestroy(&compIS));
  }
  if (useNatural) {
    /* Scatter the natural-ordered data back into the global (PETSc) ordering */
    PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
    PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
    PetscCall(DMRestoreGlobalVector(dm, &vNatural));
  }
  PetscFunctionReturn(0);
}

/*
  VecViewPlex_ExodusII_Zonal_Internal - Write a Vec corresponding to a zonal (cell based) field to an exodus file

  Collective on v

  Input Parameters:
+ v      - The vector to be written
. exoid  - the exodus id of a file (obtained from ex_open or ex_create for instance)
. step   - the time step to write at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result zonal variable index is obtained by comparing the Vec name and the
  names of zonal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all zonal variables.

  Level: beginner

.seealso: `EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Zonal_Internal()`
*/
PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm           comm;
  PetscMPIInt        size;
  DM                 dm;
  Vec                vNatural, vComp;
  const PetscScalar *varray;
  PetscInt           xs, xe, bs;
  PetscBool          useNatural;
  IS                 compIS;
  PetscInt          *csSize, *csID;
  PetscInt           numCS, set, csxs = 0;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)v, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v, &dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only matters in parallel */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) {
    PetscCall(DMGetGlobalVector(dm, &vNatural));
    PetscCall(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
    PetscCall(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
  } else {
    vNatural = v;
  }

  /* Write local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially
     Zonal variables are accessed one element block at a time, so we loop through the cell sets,
     but once the vector has been reordered to natural size, we cannot use the label information
     to figure out what to save where. */
  numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
  PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
  PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, csID);
  /* Record the size of each element block; blocks are assumed contiguous in the natural ordering */
  for (set = 0; set < numCS; ++set) {
    ex_block block;

    block.id   = csID[set];
    block.type = EX_ELEM_BLOCK;
    PetscStackCallStandard(ex_get_block_param,exoid, &block);
    csSize[set] = block.num_entry;
  }
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs > 1) PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
  for (set = 0; set < numCS; set++) {
    PetscInt csLocalSize, c;

    /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
       local slice of zonal values: xs/bs,xm/bs-1
       intersection: max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
    csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
    if (bs == 1) {
      PetscCall(VecGetArrayRead(vNatural, &varray));
      PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]);
      PetscCall(VecRestoreArrayRead(vNatural, &varray));
    } else {
      /* Vector field: write each component c of the block-local slice as variable offset+c */
      for (c = 0; c < bs; ++c) {
        PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
        PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
        PetscCall(VecGetArrayRead(vComp, &varray));
        PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]);
        PetscCall(VecRestoreArrayRead(vComp, &varray));
        PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
      }
    }
    csxs += csSize[set];
  }
  PetscCall(PetscFree2(csID, csSize));
  if (bs > 1) PetscCall(ISDestroy(&compIS));
  if (useNatural) PetscCall(DMRestoreGlobalVector(dm,&vNatural));
  PetscFunctionReturn(0);
}

/*
  VecLoadPlex_ExodusII_Zonal_Internal - Read a Vec corresponding to a zonal (cell based) field from an exodus file

  Collective on v

  Input Parameters:
+ v      - The vector to be read
. exoid  - the exodus id of a file (obtained from ex_open or ex_create for instance)
. step   - the time step to read at (exodus steps are numbered starting from 1)
- offset - the location of the variable in the file

  Notes:
  The exodus result zonal variable index is obtained by comparing the Vec name and the
  names of zonal variables declared in the exodus file. For instance for a Vec named "V"
  the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
  amongst all zonal variables.

  Level: beginner

.seealso: `EXOGetVarIndex_Internal()`, `DMPlexView_ExodusII_Internal()`, `VecViewPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Nodal_Internal()`, `VecLoadPlex_ExodusII_Zonal_Internal()`
*/
PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm     comm;
  PetscMPIInt  size;
  DM           dm;
  Vec          vNatural, vComp;
  PetscScalar *varray;
  PetscInt     xs, xe, bs;
  PetscBool    useNatural;
  IS           compIS;
  PetscInt    *csSize, *csID;
  PetscInt     numCS, set, csxs = 0;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)v,&comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(VecGetDM(v, &dm));
  PetscCall(DMGetUseNatural(dm, &useNatural));
  /* Natural (file) ordering only matters in parallel; on one rank we read straight into v */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) PetscCall(DMGetGlobalVector(dm,&vNatural));
  else {vNatural = v;}

  /* Read local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially
     Zonal variables are accessed one element block at a time, so we loop through the cell sets,
     but once the vector has been reordered to natural size, we cannot use the label information
     to figure out what to save where. */
  numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
  PetscCall(PetscMalloc2(numCS, &csID, numCS, &csSize));
  PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, csID);
  /* Record the size of each element block; blocks are assumed contiguous in the natural ordering */
  for (set = 0; set < numCS; ++set) {
    ex_block block;

    block.id   = csID[set];
    block.type = EX_ELEM_BLOCK;
    PetscStackCallStandard(ex_get_block_param,exoid, &block);
    csSize[set] = block.num_entry;
  }
  PetscCall(VecGetOwnershipRange(vNatural, &xs, &xe));
  PetscCall(VecGetBlockSize(vNatural, &bs));
  if (bs > 1) PetscCall(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
  for (set = 0; set < numCS; ++set) {
    PetscInt csLocalSize, c;

    /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
       local slice of zonal values: xs/bs,xm/bs-1
       intersection: max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
    csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
    if (bs == 1) {
      PetscCall(VecGetArray(vNatural, &varray));
      PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]);
      PetscCall(VecRestoreArray(vNatural, &varray));
    } else {
      /* Vector field: read exodus variable offset+c into component c of the block-local slice */
      for (c = 0; c < bs; ++c) {
        PetscCall(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
        PetscCall(VecGetSubVector(vNatural, compIS, &vComp));
        PetscCall(VecGetArray(vComp, &varray));
        PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]);
        PetscCall(VecRestoreArray(vComp, &varray));
        PetscCall(VecRestoreSubVector(vNatural, compIS, &vComp));
      }
    }
    csxs += csSize[set];
  }
  PetscCall(PetscFree2(csID, csSize));
  if (bs > 1) PetscCall(ISDestroy(&compIS));
  if (useNatural) {
    /* Scatter the natural-ordered data back into the global (PETSc) ordering */
    PetscCall(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
    PetscCall(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
    PetscCall(DMRestoreGlobalVector(dm, &vNatural));
  }
  PetscFunctionReturn(0);
}
#endif

/*@
  PetscViewerExodusIIGetId - Get the file id of the ExodusII file

  Logically Collective on PetscViewer

  Input Parameter:
. viewer - the PetscViewer

  Output Parameter:
. exoid - The ExodusII file id

  Level: intermediate

.seealso: `PetscViewerFileSetMode()`, `PetscViewerCreate()`, `PetscViewerSetType()`, `PetscViewerBinaryOpen()`
@*/
PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Dispatch to the type-specific implementation if the viewer provides one; no-op otherwise */
  PetscTryMethod(viewer, "PetscViewerGetId_C",(PetscViewer,int*),(viewer,exoid));
  PetscFunctionReturn(0);
}

/*@
  PetscViewerExodusIISetOrder - Set the elements order in the exodusII file.

  Collective

  Input Parameters:
+ viewer - the viewer
- order  - elements order

  Level: beginner

.seealso: `PetscViewerExodusIIGetId()`, `PetscViewerExodusIIGetOrder()`, `PetscViewerExodusIISetOrder()`
@*/
PetscErrorCode PetscViewerExodusIISetOrder(PetscViewer viewer, PetscInt order)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Dispatch to the type-specific implementation if the viewer provides one; no-op otherwise */
  PetscTryMethod(viewer, "PetscViewerSetOrder_C",(PetscViewer,PetscInt),(viewer,order));
  PetscFunctionReturn(0);
}

/*@
  PetscViewerExodusIIGetOrder - Get the elements order in the exodusII file.

  Collective

  Input Parameter:
. viewer - the viewer

  Output Parameter:
. order - elements order

  Level: beginner

.seealso: `PetscViewerExodusIIGetId()`, `PetscViewerExodusIIGetOrder()`, `PetscViewerExodusIISetOrder()`
@*/
PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Dispatch to the type-specific implementation if the viewer provides one; no-op otherwise */
  PetscTryMethod(viewer, "PetscViewerGetOrder_C",(PetscViewer,PetscInt*),(viewer,order));
  PetscFunctionReturn(0);
}

/*@C
  PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.

  Collective

  Input Parameters:
+ comm - MPI communicator
. name - name of file
- type - type of file
$    FILE_MODE_WRITE - create new file for binary output
$    FILE_MODE_READ - open existing file for binary input
$    FILE_MODE_APPEND - open existing file for binary output

  Output Parameter:
. exo - PetscViewer for Exodus II input/output to use with the specified file

  Level: beginner

  Note:
  This PetscViewer should be destroyed with PetscViewerDestroy().

.seealso: `PetscViewerPushFormat()`, `PetscViewerDestroy()`,
          `DMLoad()`, `PetscFileMode`, `PetscViewer`, `PetscViewerSetType()`, `PetscViewerFileSetMode()`, `PetscViewerFileSetName()`
@*/
PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
{
  PetscFunctionBegin;
  /* Standard create/set-type/set-mode/set-name sequence.  NOTE(review): the mode is set before
     the name because PetscViewerFileSetName appears to open the file -- confirm against
     PetscViewerFileSetName_ExodusII */
  PetscCall(PetscViewerCreate(comm, exo));
  PetscCall(PetscViewerSetType(*exo, PETSCVIEWEREXODUSII));
  PetscCall(PetscViewerFileSetMode(*exo, type));
  PetscCall(PetscViewerFileSetName(*exo, name));
  PetscCall(PetscViewerSetFromOptions(*exo));
  PetscFunctionReturn(0);
}

/*@C
  DMPlexCreateExodusFromFile - Create a DMPlex mesh from an ExodusII file.
1293 1294 Collective 1295 1296 Input Parameters: 1297 + comm - The MPI communicator 1298 . filename - The name of the ExodusII file 1299 - interpolate - Create faces and edges in the mesh 1300 1301 Output Parameter: 1302 . dm - The DM object representing the mesh 1303 1304 Level: beginner 1305 1306 .seealso: `DMPLEX`, `DMCreate()`, `DMPlexCreateExodus()` 1307 @*/ 1308 PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm) 1309 { 1310 PetscMPIInt rank; 1311 #if defined(PETSC_HAVE_EXODUSII) 1312 int CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1; 1313 float version; 1314 #endif 1315 1316 PetscFunctionBegin; 1317 PetscValidCharPointer(filename, 2); 1318 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 1319 #if defined(PETSC_HAVE_EXODUSII) 1320 if (rank == 0) { 1321 exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version); 1322 PetscCheck(exoid > 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", filename); 1323 } 1324 PetscCall(DMPlexCreateExodus(comm, exoid, interpolate, dm)); 1325 if (rank == 0) {PetscStackCallStandard(ex_close,exoid);} 1326 PetscFunctionReturn(0); 1327 #else 1328 SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. 
Reconfigure using --download-exodusii"); 1329 #endif 1330 } 1331 1332 #if defined(PETSC_HAVE_EXODUSII) 1333 static PetscErrorCode ExodusGetCellType_Internal(const char *elem_type, DMPolytopeType *ct) 1334 { 1335 PetscBool flg; 1336 1337 PetscFunctionBegin; 1338 *ct = DM_POLYTOPE_UNKNOWN; 1339 PetscCall(PetscStrcmp(elem_type, "TRI", &flg)); 1340 if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;} 1341 PetscCall(PetscStrcmp(elem_type, "TRI3", &flg)); 1342 if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;} 1343 PetscCall(PetscStrcmp(elem_type, "QUAD", &flg)); 1344 if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;} 1345 PetscCall(PetscStrcmp(elem_type, "QUAD4", &flg)); 1346 if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;} 1347 PetscCall(PetscStrcmp(elem_type, "SHELL4", &flg)); 1348 if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;} 1349 PetscCall(PetscStrcmp(elem_type, "TETRA", &flg)); 1350 if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;} 1351 PetscCall(PetscStrcmp(elem_type, "TET4", &flg)); 1352 if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;} 1353 PetscCall(PetscStrcmp(elem_type, "WEDGE", &flg)); 1354 if (flg) {*ct = DM_POLYTOPE_TRI_PRISM; goto done;} 1355 PetscCall(PetscStrcmp(elem_type, "HEX", &flg)); 1356 if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;} 1357 PetscCall(PetscStrcmp(elem_type, "HEX8", &flg)); 1358 if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;} 1359 PetscCall(PetscStrcmp(elem_type, "HEXAHEDRON", &flg)); 1360 if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;} 1361 SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unrecognized element type %s", elem_type); 1362 done: 1363 PetscFunctionReturn(0); 1364 } 1365 #endif 1366 1367 /*@ 1368 DMPlexCreateExodus - Create a DMPlex mesh from an ExodusII file ID. 1369 1370 Collective 1371 1372 Input Parameters: 1373 + comm - The MPI communicator 1374 . 
 exoid - The ExodusII id associated with a exodus file and obtained using ex_open
- interpolate - Create faces and edges in the mesh

  Output Parameter:
. dm - The DM object representing the mesh

  Level: beginner

.seealso: `DMPLEX`, `DMCreate()`
@*/
PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool interpolate, DM *dm)
{
#if defined(PETSC_HAVE_EXODUSII)
  PetscMPIInt  num_proc, rank;
  DMLabel      cellSets = NULL, faceSets = NULL, vertSets = NULL;
  PetscSection coordSection;
  Vec          coordinates;
  PetscScalar *coords;
  PetscInt     coordSize, v;
  /* Read from ex_get_init() */
  char         title[PETSC_MAX_PATH_LEN+1];
  int          dim = 0, dimEmbed = 0, numVertices = 0, numCells = 0;
  int          num_cs = 0, num_vs = 0, num_fs = 0;
#endif

  PetscFunctionBegin;
#if defined(PETSC_HAVE_EXODUSII)
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &num_proc));
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  /* Open EXODUS II file and read basic information on rank 0, then broadcast to all processors */
  if (rank == 0) {
    PetscCall(PetscMemzero(title,PETSC_MAX_PATH_LEN+1));
    PetscStackCallStandard(ex_get_init,exoid, title, &dimEmbed, &numVertices, &numCells, &num_cs, &num_vs, &num_fs);
    PetscCheck(num_cs,PETSC_COMM_SELF,PETSC_ERR_SUP,"Exodus file does not contain any cell set");
  }
  PetscCallMPI(MPI_Bcast(title, PETSC_MAX_PATH_LEN+1, MPI_CHAR, 0, comm));
  /* NOTE(review): dim is still 0 on every rank at this point (it is only derived from the cell
     types below, and ex_get_init fills dimEmbed, not dim), so this broadcast always sends 0 and
     looks redundant with the ints[] broadcast further down -- confirm */
  PetscCallMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
  PetscCall(PetscObjectSetName((PetscObject) *dm, title));
  /* numCells/numVertices are only nonzero on rank 0: the mesh is built serially there and the
     other ranks get an empty chart */
  PetscCall(DMPlexSetChart(*dm, 0, numCells+numVertices));
  /* We do not want this label automatically computed, instead we compute it here */
  PetscCall(DMCreateLabel(*dm, "celltype"));

  /* Read cell sets information */
  if (rank == 0) {
    PetscInt *cone;
    int       c, cs, ncs, c_loc, v, v_loc;
    /* Read from ex_get_elem_blk_ids() */
    int *cs_id, *cs_order;
    /* Read from ex_get_elem_block() */
    char buffer[PETSC_MAX_PATH_LEN+1];
    int  num_cell_in_set, num_vertex_per_cell, num_hybrid, num_attr;
    /* Read from ex_get_elem_conn() */
    int *cs_connect;

    /* Get cell sets IDs */
    PetscCall(PetscMalloc2(num_cs, &cs_id, num_cs, &cs_order));
    PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, cs_id);
    /* Read the cell set connectivity table and build mesh topology
       EXO standard requires that cells in cell sets be numbered sequentially and be pairwise disjoint. */
    /* Check for a hybrid mesh: cs_order[] is built so that non-prism blocks come first and
       triangular-prism blocks are pushed to the end of the processing order */
    for (cs = 0, num_hybrid = 0; cs < num_cs; ++cs) {
      DMPolytopeType ct;
      char           elem_type[PETSC_MAX_PATH_LEN];

      PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
      PetscStackCallStandard(ex_get_elem_type,exoid, cs_id[cs], elem_type);
      PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
      /* The topological dimension of the mesh is the largest cell dimension seen */
      dim = PetscMax(dim, DMPolytopeTypeGetDim(ct));
      PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr);
      switch (ct) {
        case DM_POLYTOPE_TRI_PRISM:
          cs_order[cs] = cs;
          ++num_hybrid;
          break;
        default:
          /* Shift the already-recorded prism entries right and insert this block before them */
          for (c = cs; c > cs-num_hybrid; --c) cs_order[c] = cs_order[c-1];
          cs_order[cs-num_hybrid] = cs;
      }
    }
    /* First set sizes */
    for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
      DMPolytopeType ct;
      char           elem_type[PETSC_MAX_PATH_LEN];
      const PetscInt cs = cs_order[ncs];

      PetscCall(PetscArrayzero(elem_type, sizeof(elem_type)));
      PetscStackCallStandard(ex_get_elem_type,exoid, cs_id[cs], elem_type);
      PetscCall(ExodusGetCellType_Internal(elem_type, &ct));
      PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr);
      for (c_loc = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
        PetscCall(DMPlexSetConeSize(*dm, c, num_vertex_per_cell));
        PetscCall(DMPlexSetCellType(*dm, c, ct));
      }
    }
    for (v = numCells; v < numCells+numVertices; ++v) PetscCall(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
    PetscCall(DMSetUp(*dm));
    /* Second pass: read the connectivity of each block and set the cell cones */
    for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
      const PetscInt cs = cs_order[ncs];
      PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
      PetscCall(PetscMalloc2(num_vertex_per_cell*num_cell_in_set,&cs_connect,num_vertex_per_cell,&cone));
      PetscStackCallStandard(ex_get_conn,exoid, EX_ELEM_BLOCK, cs_id[cs], cs_connect,NULL,NULL);
      /* EXO uses Fortran-based indexing, DMPlex uses C-style and numbers cell first then vertices. */
      for (c_loc = 0, v = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
        DMPolytopeType ct;

        for (v_loc = 0; v_loc < num_vertex_per_cell; ++v_loc, ++v) {
          cone[v_loc] = cs_connect[v]+numCells-1;
        }
        PetscCall(DMPlexGetCellType(*dm, c, &ct));
        /* Exodus vertex orderings differ from DMPlex's; invert to the DMPlex convention */
        PetscCall(DMPlexInvertCell(ct, cone));
        PetscCall(DMPlexSetCone(*dm, c, cone));
        PetscCall(DMSetLabelValue_Fast(*dm, &cellSets, "Cell Sets", c, cs_id[cs]));
      }
      PetscCall(PetscFree2(cs_connect,cone));
    }
    PetscCall(PetscFree2(cs_id, cs_order));
  }
  { /* Broadcast the (now known) dimensions so every rank agrees */
    PetscInt ints[] = {dim, dimEmbed};

    PetscCallMPI(MPI_Bcast(ints, 2, MPIU_INT, 0, comm));
    PetscCall(DMSetDimension(*dm, ints[0]));
    PetscCall(DMSetCoordinateDim(*dm, ints[1]));
    dim      = ints[0];
    dimEmbed = ints[1];
  }
  PetscCall(DMPlexSymmetrize(*dm));
  PetscCall(DMPlexStratify(*dm));
  if (interpolate) {
    DM idm;

    PetscCall(DMPlexInterpolate(*dm, &idm));
    PetscCall(DMDestroy(dm));
    *dm  = idm;
  }

  /* Create vertex set label */
  if (rank == 0 && (num_vs > 0)) {
    int  vs, v;
    /* Read from ex_get_node_set_ids() */
    int *vs_id;
    /* Read from ex_get_node_set_param() */
    int  num_vertex_in_set;
    /* Read from ex_get_node_set() */
    int *vs_vertex_list;

    /* Get vertex set ids */
    PetscCall(PetscMalloc1(num_vs, &vs_id));
    PetscStackCallStandard(ex_get_ids,exoid, EX_NODE_SET, vs_id);
    for (vs = 0; vs < num_vs; ++vs) {
      PetscStackCallStandard(ex_get_set_param,exoid, EX_NODE_SET, vs_id[vs], &num_vertex_in_set, NULL);
      PetscCall(PetscMalloc1(num_vertex_in_set, &vs_vertex_list));
      PetscStackCallStandard(ex_get_set,exoid, EX_NODE_SET, vs_id[vs], vs_vertex_list, NULL);
      for (v = 0; v < num_vertex_in_set; ++v) {
        /* Shift 1-based exodus vertex numbers into the DMPlex chart (vertices follow cells) */
        PetscCall(DMSetLabelValue_Fast(*dm, &vertSets, "Vertex Sets", vs_vertex_list[v]+numCells-1, vs_id[vs]));
      }
      PetscCall(PetscFree(vs_vertex_list));
    }
    PetscCall(PetscFree(vs_id));
  }
  /* Read coordinates */
  PetscCall(DMGetCoordinateSection(*dm, &coordSection));
  PetscCall(PetscSectionSetNumFields(coordSection, 1));
  PetscCall(PetscSectionSetFieldComponents(coordSection, 0, dimEmbed));
  PetscCall(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
  for (v = numCells; v < numCells+numVertices; ++v) {
    PetscCall(PetscSectionSetDof(coordSection, v, dimEmbed));
    PetscCall(PetscSectionSetFieldDof(coordSection, v, 0, dimEmbed));
  }
  PetscCall(PetscSectionSetUp(coordSection));
  PetscCall(PetscSectionGetStorageSize(coordSection, &coordSize));
  PetscCall(VecCreate(PETSC_COMM_SELF, &coordinates));
  PetscCall(PetscObjectSetName((PetscObject) coordinates, "coordinates"));
  PetscCall(VecSetSizes(coordinates, coordSize, PETSC_DETERMINE));
  PetscCall(VecSetBlockSize(coordinates, dimEmbed));
  PetscCall(VecSetType(coordinates,VECSTANDARD));
  PetscCall(VecGetArray(coordinates, &coords));
  if (rank == 0) {
    PetscReal *x, *y, *z;

    /* Exodus stores coordinates as three separate component arrays; interleave them here */
    PetscCall(PetscMalloc3(numVertices,&x,numVertices,&y,numVertices,&z));
    PetscStackCallStandard(ex_get_coord,exoid, x, y, z);
    if (dimEmbed > 0) {
      for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+0] = x[v];
    }
    if (dimEmbed > 1) {
      for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+1] = y[v];
    }
    if (dimEmbed > 2) {
      for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+2] = z[v];
    }
    PetscCall(PetscFree3(x,y,z));
  }
  PetscCall(VecRestoreArray(coordinates, &coords));
  PetscCall(DMSetCoordinatesLocal(*dm, coordinates));
  PetscCall(VecDestroy(&coordinates));

  /* Create side set label */
  if (rank == 0 && interpolate && (num_fs > 0)) {
    int fs, f, voff;
    /* Read from ex_get_side_set_ids() */
    int *fs_id;
    /* Read from ex_get_side_set_param() */
    int num_side_in_set;
    /* Read from ex_get_side_set_node_list() */
    int *fs_vertex_count_list, *fs_vertex_list;
    /* Read side set labels */
    char fs_name[MAX_STR_LENGTH+1];

    /* Get side set ids */
    PetscCall(PetscMalloc1(num_fs, &fs_id));
    PetscStackCallStandard(ex_get_ids,exoid, EX_SIDE_SET, fs_id);
    for (fs = 0; fs < num_fs; ++fs) {
      PetscStackCallStandard(ex_get_set_param,exoid, EX_SIDE_SET, fs_id[fs], &num_side_in_set, NULL);
      /* The 4-per-side allocation matches the <= 4 vertices-per-side check below */
      PetscCall(PetscMalloc2(num_side_in_set,&fs_vertex_count_list,num_side_in_set*4,&fs_vertex_list));
      PetscStackCallStandard(ex_get_side_set_node_list,exoid, fs_id[fs], fs_vertex_count_list, fs_vertex_list);
      /* Get the specific name associated with this side set ID. */
      int fs_name_err = ex_get_name(exoid, EX_SIDE_SET, fs_id[fs], fs_name);
      for (f = 0, voff = 0; f < num_side_in_set; ++f) {
        const PetscInt *faces   = NULL;
        PetscInt        faceSize = fs_vertex_count_list[f], numFaces;
        PetscInt        faceVertices[4], v;

        PetscCheck(faceSize <= 4,comm, PETSC_ERR_ARG_WRONG, "ExodusII side cannot have %d > 4 vertices", faceSize);
        for (v = 0; v < faceSize; ++v, ++voff) {
          faceVertices[v] = fs_vertex_list[voff]+numCells-1;
        }
        /* A side is identified with the unique face joining its vertices */
        PetscCall(DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
        PetscCheck(numFaces == 1,comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %d faces", f, fs, numFaces);
        PetscCall(DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs]));
        /* Only add the label if one has been detected for this side set. */
        if (!fs_name_err) {
          PetscCall(DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs]));
        }
        PetscCall(DMPlexRestoreJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
      }
      PetscCall(PetscFree2(fs_vertex_count_list,fs_vertex_list));
    }
    PetscCall(PetscFree(fs_id));
  }

  { /* Create Cell/Face/Vertex Sets labels at all processes */
    enum {n = 3};
    PetscBool flag[n];

    flag[0] = cellSets ? PETSC_TRUE : PETSC_FALSE;
    flag[1] = faceSets ? PETSC_TRUE : PETSC_FALSE;
    flag[2] = vertSets ? PETSC_TRUE : PETSC_FALSE;
    PetscCallMPI(MPI_Bcast(flag, n, MPIU_BOOL, 0, comm));
    if (flag[0]) PetscCall(DMCreateLabel(*dm, "Cell Sets"));
    if (flag[1]) PetscCall(DMCreateLabel(*dm, "Face Sets"));
    if (flag[2]) PetscCall(DMCreateLabel(*dm, "Vertex Sets"));
  }
  PetscFunctionReturn(0);
#else
  SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
#endif
}