xref: /petsc/src/dm/impls/plex/plexexodusii.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
1 #define PETSCDM_DLL
2 #include <petsc/private/dmpleximpl.h>    /*I   "petscdmplex.h"   I*/
3 
4 #if defined(PETSC_HAVE_EXODUSII)
5 #include <netcdf.h>
6 #include <exodusII.h>
7 #endif
8 
9 #include <petsc/private/viewerimpl.h>
10 #include <petsc/private/viewerexodusiiimpl.h>
11 #if defined(PETSC_HAVE_EXODUSII)
12 /*
13   PETSC_VIEWER_EXODUSII_ - Creates an ExodusII PetscViewer shared by all processors in a communicator.
14 
15   Collective
16 
17   Input Parameter:
18 . comm - the MPI communicator to share the ExodusII PetscViewer
19 
20   Level: intermediate
21 
  Notes:
  This function misses Fortran bindings.

  Unlike almost all other PETSc routines, PETSC_VIEWER_EXODUSII_ does not return
  an error code.  The ExodusII PetscViewer is usually used in the form
$       XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm));
29 
30 .seealso: PetscViewerExodusIIOpen(), PetscViewerType, PetscViewerCreate(), PetscViewerDestroy()
31 */
32 PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm)
33 {
34   PetscViewer    viewer;
35   PetscErrorCode ierr;
36 
37   PetscFunctionBegin;
38   ierr = PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer);
39   if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
40   ierr = PetscObjectRegisterDestroy((PetscObject) viewer);
41   if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
42   PetscFunctionReturn(viewer);
43 }
44 
45 static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer)
46 {
47   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) v->data;
48 
49   PetscFunctionBegin;
50   if (exo->filename) CHKERRQ(PetscViewerASCIIPrintf(viewer, "Filename:    %s\n", exo->filename));
51   if (exo->exoid)    CHKERRQ(PetscViewerASCIIPrintf(viewer, "exoid:       %d\n", exo->exoid));
52   if (exo->btype)    CHKERRQ(PetscViewerASCIIPrintf(viewer, "IO Mode:     %d\n", exo->btype));
53   if (exo->order)    CHKERRQ(PetscViewerASCIIPrintf(viewer, "Mesh order:  %d\n", exo->order));
54   PetscFunctionReturn(0);
55 }
56 
/* Process runtime options for the ExodusII viewer. No options are currently
   defined; the empty Head/Tail section keeps the viewer visible in -help output. */
static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscOptionItems *PetscOptionsObject, PetscViewer v)
{
  PetscFunctionBegin;
  CHKERRQ(PetscOptionsHead(PetscOptionsObject, "ExodusII PetscViewer Options"));
  CHKERRQ(PetscOptionsTail());
  PetscFunctionReturn(0);
}
64 
/* Setup hook for the ExodusII viewer: intentionally a no-op. The exodus file is
   opened/created lazily by PetscViewerFileSetName_ExodusII and DMView. */
static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
70 
71 static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer)
72 {
73   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
74 
75   PetscFunctionBegin;
76   if (exo->exoid >= 0) {PetscStackCallStandard(ex_close,exo->exoid);}
77   CHKERRQ(PetscFree(exo->filename));
78   CHKERRQ(PetscFree(exo));
79   CHKERRQ(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetName_C",NULL));
80   CHKERRQ(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileGetName_C",NULL));
81   CHKERRQ(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetMode_C",NULL));
82   CHKERRQ(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetId_C",NULL));
83   CHKERRQ(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetOrder_C",NULL));
84   CHKERRQ(PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerSetOrder_C",NULL));
85   PetscFunctionReturn(0);
86 }
87 
88 static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[])
89 {
90   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
91   PetscMPIInt           rank;
92   int                   CPU_word_size, IO_word_size, EXO_mode;
93   MPI_Info              mpi_info = MPI_INFO_NULL;
94   float                 EXO_version;
95 
96   CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank));
97   CPU_word_size = sizeof(PetscReal);
98   IO_word_size  = sizeof(PetscReal);
99 
100   PetscFunctionBegin;
101   if (exo->exoid >= 0) {
102     PetscStackCallStandard(ex_close,exo->exoid);
103     exo->exoid = -1;
104   }
105   if (exo->filename) CHKERRQ(PetscFree(exo->filename));
106   CHKERRQ(PetscStrallocpy(name, &exo->filename));
107   switch (exo->btype) {
108   case FILE_MODE_READ:
109     EXO_mode = EX_READ;
110     break;
111   case FILE_MODE_APPEND:
112   case FILE_MODE_UPDATE:
113   case FILE_MODE_APPEND_UPDATE:
114     /* Will fail if the file does not already exist */
115     EXO_mode = EX_WRITE;
116     break;
117   case FILE_MODE_WRITE:
118     /*
119       exodus only allows writing geometry upon file creation, so we will let DMView create the file.
120     */
121     PetscFunctionReturn(0);
122     break;
123   default:
124     SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
125   }
126   #if defined(PETSC_USE_64BIT_INDICES)
127   EXO_mode += EX_ALL_INT64_API;
128   #endif
129   exo->exoid = ex_open_par(name,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,PETSC_COMM_WORLD,mpi_info);
130   PetscCheckFalse(exo->exoid < 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", name);
131   PetscFunctionReturn(0);
132 }
133 
134 static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name)
135 {
136   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
137 
138   PetscFunctionBegin;
139   *name = exo->filename;
140   PetscFunctionReturn(0);
141 }
142 
143 static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type)
144 {
145   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
146 
147   PetscFunctionBegin;
148   exo->btype = type;
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type)
153 {
154   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
155 
156   PetscFunctionBegin;
157   *type = exo->btype;
158   PetscFunctionReturn(0);
159 }
160 
161 static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid)
162 {
163   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
164 
165   PetscFunctionBegin;
166   *exoid = exo->exoid;
167   PetscFunctionReturn(0);
168 }
169 
170 static PetscErrorCode PetscViewerExodusIIGetOrder_ExodusII(PetscViewer viewer, PetscInt *order)
171 {
172   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
173 
174   PetscFunctionBegin;
175   *order = exo->order;
176   PetscFunctionReturn(0);
177 }
178 
179 static PetscErrorCode PetscViewerExodusIISetOrder_ExodusII(PetscViewer viewer, PetscInt order)
180 {
181   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
182 
183   PetscFunctionBegin;
184   exo->order = order;
185   PetscFunctionReturn(0);
186 }
187 
188 /*MC
189    PETSCVIEWEREXODUSII - A viewer that writes to an Exodus II file
190 
191 .seealso:  PetscViewerExodusIIOpen(), PetscViewerCreate(), PETSCVIEWERBINARY, PETSCVIEWERHDF5, DMView(),
192            PetscViewerFileSetName(), PetscViewerFileSetMode(), PetscViewerFormat, PetscViewerType, PetscViewerSetType()
193 
194   Level: beginner
195 M*/
196 
197 PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v)
198 {
199   PetscViewer_ExodusII *exo;
200 
201   PetscFunctionBegin;
202   CHKERRQ(PetscNewLog(v,&exo));
203 
204   v->data                = (void*) exo;
205   v->ops->destroy        = PetscViewerDestroy_ExodusII;
206   v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII;
207   v->ops->setup          = PetscViewerSetUp_ExodusII;
208   v->ops->view           = PetscViewerView_ExodusII;
209   v->ops->flush          = 0;
210   exo->btype             = (PetscFileMode) -1;
211   exo->filename          = 0;
212   exo->exoid             = -1;
213 
214   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetName_C",PetscViewerFileSetName_ExodusII));
215   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetName_C",PetscViewerFileGetName_ExodusII));
216   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetMode_C",PetscViewerFileSetMode_ExodusII));
217   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetMode_C",PetscViewerFileGetMode_ExodusII));
218   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetId_C",PetscViewerExodusIIGetId_ExodusII));
219   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerSetOrder_C",PetscViewerExodusIISetOrder_ExodusII));
220   CHKERRQ(PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetOrder_C",PetscViewerExodusIIGetOrder_ExodusII));
221   PetscFunctionReturn(0);
222 }
223 
224 /*
225   EXOGetVarIndex - Locate a result in an exodus file based on its name
226 
227   Collective
228 
229   Input Parameters:
230 + exoid    - the exodus id of a file (obtained from ex_open or ex_create for instance)
231 . obj_type - the type of entity for instance EX_NODAL, EX_ELEM_BLOCK
232 - name     - the name of the result
233 
234   Output Parameters:
235 . varIndex - the location in the exodus file of the result
236 
237   Notes:
238   The exodus variable index is obtained by comparing name and the
239   names of zonal variables declared in the exodus file. For instance if name is "V"
240   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
241   amongst all variables of type obj_type.
242 
243   Level: beginner
244 
245 .seealso: EXOGetVarIndex(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadNodal_PlexEXO(),VecLoadZonal_PlexEXO()
246 */
247 PetscErrorCode EXOGetVarIndex_Internal(int exoid, ex_entity_type obj_type, const char name[], int *varIndex)
248 {
249   int            num_vars, i, j;
250   char           ext_name[MAX_STR_LENGTH+1], var_name[MAX_STR_LENGTH+1];
251   const int      num_suffix = 5;
252   char          *suffix[5];
253   PetscBool      flg;
254 
255   PetscFunctionBegin;
256   suffix[0] = (char *) "";
257   suffix[1] = (char *) "_X";
258   suffix[2] = (char *) "_XX";
259   suffix[3] = (char *) "_1";
260   suffix[4] = (char *) "_11";
261 
262   *varIndex = -1;
263   PetscStackCallStandard(ex_get_variable_param,exoid, obj_type, &num_vars);
264   for (i = 0; i < num_vars; ++i) {
265     PetscStackCallStandard(ex_get_variable_name,exoid, obj_type, i+1, var_name);
266     for (j = 0; j < num_suffix; ++j) {
267       CHKERRQ(PetscStrncpy(ext_name, name, MAX_STR_LENGTH));
268       CHKERRQ(PetscStrlcat(ext_name, suffix[j], MAX_STR_LENGTH));
269       CHKERRQ(PetscStrcasecmp(ext_name, var_name, &flg));
270       if (flg) {
271         *varIndex = i+1;
272       }
273     }
274   }
275   PetscFunctionReturn(0);
276 }
277 
278 /*
279   DMView_PlexExodusII - Write a DM to disk in exodus format
280 
281   Collective on dm
282 
283   Input Parameters:
+ dm  - The dm to be written
- viewer - an exodusII viewer

  Notes:
  Not all DM can be written to disk this way. For instance, exodus assumes that element blocks (mapped to "Cell Sets" labels)
  consist of sequentially numbered cells. If this is not the case, the exodus file will be corrupted.
290 
291   If the dm has been distributed, only the part of the DM on MPI rank 0 (including "ghost" cells and vertices)
292   will be written.
293 
294   DMPlex only represents geometry while most post-processing software expect that a mesh also provides information
295   on the discretization space. This function assumes that the file represents Lagrange finite elements of order 1 or 2.
296   The order of the mesh shall be set using PetscViewerExodusIISetOrder
297   It should be extended to use PetscFE objects.
298 
299   This function will only handle TRI, TET, QUAD, and HEX cells.
300   Level: beginner
301 
302 .seealso:
303 */
304 PetscErrorCode DMView_PlexExodusII(DM dm, PetscViewer viewer)
305 {
306   enum ElemType {TRI, QUAD, TET, HEX};
307   MPI_Comm        comm;
308   PetscInt        degree; /* the order of the mesh */
309   /* Connectivity Variables */
310   PetscInt        cellsNotInConnectivity;
311   /* Cell Sets */
312   DMLabel         csLabel;
313   IS              csIS;
314   const PetscInt *csIdx;
315   PetscInt        num_cs, cs;
316   enum ElemType  *type;
317   PetscBool       hasLabel;
318   /* Coordinate Variables */
319   DM              cdm;
320   PetscSection    coordSection;
321   Vec             coord;
322   PetscInt      **nodes;
323   PetscInt        depth, d, dim, skipCells = 0;
324   PetscInt        pStart, pEnd, p, cStart, cEnd, numCells, vStart, vEnd, numVertices, eStart, eEnd, numEdges, fStart, fEnd, numFaces, numNodes;
325   PetscInt        num_vs, num_fs;
326   PetscMPIInt     rank, size;
327   const char     *dmName;
328   PetscInt        nodesTriP1[4]  = {3,0,0,0};
329   PetscInt        nodesTriP2[4]  = {3,3,0,0};
330   PetscInt        nodesQuadP1[4] = {4,0,0,0};
331   PetscInt        nodesQuadP2[4] = {4,4,0,1};
332   PetscInt        nodesTetP1[4]  = {4,0,0,0};
333   PetscInt        nodesTetP2[4]  = {4,6,0,0};
334   PetscInt        nodesHexP1[4]  = {8,0,0,0};
335   PetscInt        nodesHexP2[4]  = {8,12,6,1};
336   PetscErrorCode  ierr;
337 
338   int             CPU_word_size, IO_word_size, EXO_mode;
339   float           EXO_version;
340 
341   PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
342 
343   PetscFunctionBegin;
344   CHKERRQ(PetscObjectGetComm((PetscObject)dm, &comm));
345   CHKERRMPI(MPI_Comm_rank(comm, &rank));
346   CHKERRMPI(MPI_Comm_size(comm, &size));
347 
348   /*
349     Creating coordSection is a collective operation so we do it somewhat out of sequence
350   */
351   CHKERRQ(PetscSectionCreate(comm, &coordSection));
352   CHKERRQ(DMGetCoordinatesLocalSetUp(dm));
353   if (!rank) {
354     switch (exo->btype) {
355     case FILE_MODE_READ:
356     case FILE_MODE_APPEND:
357     case FILE_MODE_UPDATE:
358     case FILE_MODE_APPEND_UPDATE:
359       /* exodusII does not allow writing geometry to an existing file */
360       SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "cannot add geometry to existing file %s", exo->filename);
361     case FILE_MODE_WRITE:
362       /* Create an empty file if one already exists*/
363       EXO_mode = EX_CLOBBER;
364 #if defined(PETSC_USE_64BIT_INDICES)
365       EXO_mode += EX_ALL_INT64_API;
366 #endif
367         CPU_word_size = sizeof(PetscReal);
368         IO_word_size  = sizeof(PetscReal);
369         exo->exoid = ex_create(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size);
370         PetscCheckFalse(exo->exoid < 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", exo->filename);
371 
372       break;
373     default:
374       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
375     }
376 
377     /* --- Get DM info --- */
378     CHKERRQ(PetscObjectGetName((PetscObject) dm, &dmName));
379     CHKERRQ(DMPlexGetDepth(dm, &depth));
380     CHKERRQ(DMGetDimension(dm, &dim));
381     CHKERRQ(DMPlexGetChart(dm, &pStart, &pEnd));
382     CHKERRQ(DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd));
383     CHKERRQ(DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd));
384     CHKERRQ(DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd));
385     CHKERRQ(DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd));
386     numCells    = cEnd - cStart;
387     numEdges    = eEnd - eStart;
388     numVertices = vEnd - vStart;
389     if (depth == 3) {numFaces = fEnd - fStart;}
390     else            {numFaces = 0;}
391     CHKERRQ(DMGetLabelSize(dm, "Cell Sets", &num_cs));
392     CHKERRQ(DMGetLabelSize(dm, "Vertex Sets", &num_vs));
393     CHKERRQ(DMGetLabelSize(dm, "Face Sets", &num_fs));
394     CHKERRQ(DMGetCoordinatesLocal(dm, &coord));
395     CHKERRQ(DMGetCoordinateDM(dm, &cdm));
396     if (num_cs > 0) {
397       CHKERRQ(DMGetLabel(dm, "Cell Sets", &csLabel));
398       CHKERRQ(DMLabelGetValueIS(csLabel, &csIS));
399       CHKERRQ(ISGetIndices(csIS, &csIdx));
400     }
401     CHKERRQ(PetscMalloc1(num_cs, &nodes));
402     /* Set element type for each block and compute total number of nodes */
403     CHKERRQ(PetscMalloc1(num_cs, &type));
404     numNodes = numVertices;
405 
406     CHKERRQ(PetscViewerExodusIIGetOrder(viewer, &degree));
407     if (degree == 2) {numNodes += numEdges;}
408     cellsNotInConnectivity = numCells;
409     for (cs = 0; cs < num_cs; ++cs) {
410       IS              stratumIS;
411       const PetscInt *cells;
412       PetscScalar    *xyz = NULL;
413       PetscInt        csSize, closureSize;
414 
415       CHKERRQ(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
416       CHKERRQ(ISGetIndices(stratumIS, &cells));
417       CHKERRQ(ISGetSize(stratumIS, &csSize));
418       CHKERRQ(DMPlexVecGetClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz));
419       switch (dim) {
420         case 2:
421           if      (closureSize == 3*dim) {type[cs] = TRI;}
422           else if (closureSize == 4*dim) {type[cs] = QUAD;}
423           else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %D in dimension %D has no ExodusII type", closureSize/dim, dim);
424           break;
425         case 3:
426           if      (closureSize == 4*dim) {type[cs] = TET;}
427           else if (closureSize == 8*dim) {type[cs] = HEX;}
428           else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %D in dimension %D has no ExodusII type", closureSize/dim, dim);
429           break;
430         default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %D not handled by ExodusII viewer", dim);
431       }
432       if ((degree == 2) && (type[cs] == QUAD)) {numNodes += csSize;}
433       if ((degree == 2) && (type[cs] == HEX))  {numNodes += csSize; numNodes += numFaces;}
434       CHKERRQ(DMPlexVecRestoreClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz));
435       /* Set nodes and Element type */
436       if (type[cs] == TRI) {
437         if      (degree == 1) nodes[cs] = nodesTriP1;
438         else if (degree == 2) nodes[cs] = nodesTriP2;
439       } else if (type[cs] == QUAD) {
440         if      (degree == 1) nodes[cs] = nodesQuadP1;
441         else if (degree == 2) nodes[cs] = nodesQuadP2;
442       } else if (type[cs] == TET) {
443         if      (degree == 1) nodes[cs] = nodesTetP1;
444         else if (degree == 2) nodes[cs] = nodesTetP2;
445       } else if (type[cs] == HEX) {
446         if      (degree == 1) nodes[cs] = nodesHexP1;
447         else if (degree == 2) nodes[cs] = nodesHexP2;
448       }
449       /* Compute the number of cells not in the connectivity table */
450       cellsNotInConnectivity -= nodes[cs][3]*csSize;
451 
452       CHKERRQ(ISRestoreIndices(stratumIS, &cells));
453       CHKERRQ(ISDestroy(&stratumIS));
454     }
455     if (num_cs > 0) {PetscStackCallStandard(ex_put_init,exo->exoid, dmName, dim, numNodes, numCells, num_cs, num_vs, num_fs);}
456     /* --- Connectivity --- */
457     for (cs = 0; cs < num_cs; ++cs) {
458       IS              stratumIS;
459       const PetscInt *cells;
460       PetscInt       *connect, off = 0;
461       PetscInt        edgesInClosure = 0, facesInClosure = 0, verticesInClosure = 0;
462       PetscInt        csSize, c, connectSize, closureSize;
463       char           *elem_type = NULL;
464       char            elem_type_tri3[]  = "TRI3",  elem_type_quad4[] = "QUAD4";
465       char            elem_type_tri6[]  = "TRI6",  elem_type_quad9[] = "QUAD9";
466       char            elem_type_tet4[]  = "TET4",  elem_type_hex8[]  = "HEX8";
467       char            elem_type_tet10[] = "TET10", elem_type_hex27[] = "HEX27";
468 
469       CHKERRQ(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
470       CHKERRQ(ISGetIndices(stratumIS, &cells));
471       CHKERRQ(ISGetSize(stratumIS, &csSize));
472       /* Set Element type */
473       if (type[cs] == TRI) {
474         if      (degree == 1) elem_type = elem_type_tri3;
475         else if (degree == 2) elem_type = elem_type_tri6;
476       } else if (type[cs] == QUAD) {
477         if      (degree == 1) elem_type = elem_type_quad4;
478         else if (degree == 2) elem_type = elem_type_quad9;
479       } else if (type[cs] == TET) {
480         if      (degree == 1) elem_type = elem_type_tet4;
481         else if (degree == 2) elem_type = elem_type_tet10;
482       } else if (type[cs] == HEX) {
483         if      (degree == 1) elem_type = elem_type_hex8;
484         else if (degree == 2) elem_type = elem_type_hex27;
485       }
486       connectSize = nodes[cs][0] + nodes[cs][1] + nodes[cs][2] + nodes[cs][3];
487       CHKERRQ(PetscMalloc1(PetscMax(27,connectSize)*csSize, &connect));
488       PetscStackCallStandard(ex_put_block,exo->exoid, EX_ELEM_BLOCK, csIdx[cs], elem_type, csSize, connectSize, 0, 0, 1);
489       /* Find number of vertices, edges, and faces in the closure */
490       verticesInClosure = nodes[cs][0];
491       if (depth > 1) {
492         if (dim == 2) {
493           CHKERRQ(DMPlexGetConeSize(dm, cells[0], &edgesInClosure));
494         } else if (dim == 3) {
495           PetscInt *closure = NULL;
496 
497           CHKERRQ(DMPlexGetConeSize(dm, cells[0], &facesInClosure));
498           CHKERRQ(DMPlexGetTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure));
499           edgesInClosure = closureSize - facesInClosure - 1 - verticesInClosure;
500           CHKERRQ(DMPlexRestoreTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure));
501         }
502       }
503       /* Get connectivity for each cell */
504       for (c = 0; c < csSize; ++c) {
505         PetscInt *closure = NULL;
506         PetscInt  temp, i;
507 
508         CHKERRQ(DMPlexGetTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure));
509         for (i = 0; i < connectSize; ++i) {
510           if (i < nodes[cs][0]) {/* Vertices */
511             connect[i+off] = closure[(i+edgesInClosure+facesInClosure+1)*2] + 1;
512             connect[i+off] -= cellsNotInConnectivity;
513           } else if (i < nodes[cs][0]+nodes[cs][1]) { /* Edges */
514             connect[i+off] = closure[(i-verticesInClosure+facesInClosure+1)*2] + 1;
515             if (nodes[cs][2] == 0) connect[i+off] -= numFaces;
516             connect[i+off] -= cellsNotInConnectivity;
517           } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]) { /* Cells */
518             connect[i+off] = closure[0] + 1;
519             connect[i+off] -= skipCells;
520           } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]+nodes[cs][2]) { /* Faces */
521             connect[i+off] = closure[(i-edgesInClosure-verticesInClosure)*2] + 1;
522             connect[i+off] -= cellsNotInConnectivity;
523           } else {
524             connect[i+off] = -1;
525           }
526         }
527         /* Tetrahedra are inverted */
528         if (type[cs] == TET) {
529           temp = connect[0+off]; connect[0+off] = connect[1+off]; connect[1+off] = temp;
530           if (degree == 2) {
531             temp = connect[5+off]; connect[5+off] = connect[6+off]; connect[6+off] = temp;
532             temp = connect[7+off]; connect[7+off] = connect[8+off]; connect[8+off] = temp;
533           }
534         }
535         /* Hexahedra are inverted */
536         if (type[cs] == HEX) {
537           temp = connect[1+off]; connect[1+off] = connect[3+off]; connect[3+off] = temp;
538           if (degree == 2) {
539             temp = connect[8+off];  connect[8+off]  = connect[11+off]; connect[11+off] = temp;
540             temp = connect[9+off];  connect[9+off]  = connect[10+off]; connect[10+off] = temp;
541             temp = connect[16+off]; connect[16+off] = connect[17+off]; connect[17+off] = temp;
542             temp = connect[18+off]; connect[18+off] = connect[19+off]; connect[19+off] = temp;
543 
544             temp = connect[12+off]; connect[12+off] = connect[16+off]; connect[16+off] = temp;
545             temp = connect[13+off]; connect[13+off] = connect[17+off]; connect[17+off] = temp;
546             temp = connect[14+off]; connect[14+off] = connect[18+off]; connect[18+off] = temp;
547             temp = connect[15+off]; connect[15+off] = connect[19+off]; connect[19+off] = temp;
548 
549             temp = connect[23+off]; connect[23+off] = connect[26+off]; connect[26+off] = temp;
550             temp = connect[24+off]; connect[24+off] = connect[25+off]; connect[25+off] = temp;
551             temp = connect[25+off]; connect[25+off] = connect[26+off]; connect[26+off] = temp;
552           }
553         }
554         off += connectSize;
555         CHKERRQ(DMPlexRestoreTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure));
556       }
557       PetscStackCallStandard(ex_put_conn,exo->exoid, EX_ELEM_BLOCK, csIdx[cs], connect, 0, 0);
558       skipCells += (nodes[cs][3] == 0)*csSize;
559       CHKERRQ(PetscFree(connect));
560       CHKERRQ(ISRestoreIndices(stratumIS, &cells));
561       CHKERRQ(ISDestroy(&stratumIS));
562     }
563     CHKERRQ(PetscFree(type));
564     /* --- Coordinates --- */
565     CHKERRQ(PetscSectionSetChart(coordSection, pStart, pEnd));
566     if (num_cs) {
567       for (d = 0; d < depth; ++d) {
568         CHKERRQ(DMPlexGetDepthStratum(dm, d, &pStart, &pEnd));
569         for (p = pStart; p < pEnd; ++p) {
570           CHKERRQ(PetscSectionSetDof(coordSection, p, nodes[0][d] > 0));
571         }
572       }
573     }
574     for (cs = 0; cs < num_cs; ++cs) {
575       IS              stratumIS;
576       const PetscInt *cells;
577       PetscInt        csSize, c;
578 
579       CHKERRQ(DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS));
580       CHKERRQ(ISGetIndices(stratumIS, &cells));
581       CHKERRQ(ISGetSize(stratumIS, &csSize));
582       for (c = 0; c < csSize; ++c) {
583         CHKERRQ(PetscSectionSetDof(coordSection, cells[c], nodes[cs][3] > 0));
584       }
585       CHKERRQ(ISRestoreIndices(stratumIS, &cells));
586       CHKERRQ(ISDestroy(&stratumIS));
587     }
588     if (num_cs > 0) {
589       CHKERRQ(ISRestoreIndices(csIS, &csIdx));
590       CHKERRQ(ISDestroy(&csIS));
591     }
592     CHKERRQ(PetscFree(nodes));
593     CHKERRQ(PetscSectionSetUp(coordSection));
594     if (numNodes > 0) {
595       const char  *coordNames[3] = {"x", "y", "z"};
596       PetscScalar *closure, *cval;
597       PetscReal   *coords;
598       PetscInt     hasDof, n = 0;
599 
600       /* There can't be more than 24 values in the closure of a point for the coord coordSection */
601       CHKERRQ(PetscCalloc3(numNodes*3, &coords, dim, &cval, 24, &closure));
602       CHKERRQ(DMGetCoordinatesLocalNoncollective(dm, &coord));
603       CHKERRQ(DMPlexGetChart(dm, &pStart, &pEnd));
604       for (p = pStart; p < pEnd; ++p) {
605         CHKERRQ(PetscSectionGetDof(coordSection, p, &hasDof));
606         if (hasDof) {
607           PetscInt closureSize = 24, j;
608 
609           CHKERRQ(DMPlexVecGetClosure(cdm, NULL, coord, p, &closureSize, &closure));
610           for (d = 0; d < dim; ++d) {
611             cval[d] = 0.0;
612             for (j = 0; j < closureSize/dim; j++) cval[d] += closure[j*dim+d];
613             coords[d*numNodes+n] = PetscRealPart(cval[d]) * dim / closureSize;
614           }
615           ++n;
616         }
617       }
618       PetscStackCallStandard(ex_put_coord,exo->exoid, &coords[0*numNodes], &coords[1*numNodes], &coords[2*numNodes]);
619       CHKERRQ(PetscFree3(coords, cval, closure));
620       PetscStackCallStandard(ex_put_coord_names,exo->exoid, (char **) coordNames);
621     }
622 
623     /* --- Node Sets/Vertex Sets --- */
624     DMHasLabel(dm, "Vertex Sets", &hasLabel);
625     if (hasLabel) {
626       PetscInt        i, vs, vsSize;
627       const PetscInt *vsIdx, *vertices;
628       PetscInt       *nodeList;
629       IS              vsIS, stratumIS;
630       DMLabel         vsLabel;
631       CHKERRQ(DMGetLabel(dm, "Vertex Sets", &vsLabel));
632       CHKERRQ(DMLabelGetValueIS(vsLabel, &vsIS));
633       CHKERRQ(ISGetIndices(vsIS, &vsIdx));
634       for (vs=0; vs<num_vs; ++vs) {
635         CHKERRQ(DMLabelGetStratumIS(vsLabel, vsIdx[vs], &stratumIS));
636         CHKERRQ(ISGetIndices(stratumIS, &vertices));
637         CHKERRQ(ISGetSize(stratumIS, &vsSize));
638         CHKERRQ(PetscMalloc1(vsSize, &nodeList));
639         for (i=0; i<vsSize; ++i) {
640           nodeList[i] = vertices[i] - skipCells + 1;
641         }
642         PetscStackCallStandard(ex_put_set_param,exo->exoid, EX_NODE_SET, vsIdx[vs], vsSize, 0);
643         PetscStackCallStandard(ex_put_set,exo->exoid, EX_NODE_SET, vsIdx[vs], nodeList, NULL);
644         CHKERRQ(ISRestoreIndices(stratumIS, &vertices));
645         CHKERRQ(ISDestroy(&stratumIS));
646         CHKERRQ(PetscFree(nodeList));
647       }
648       CHKERRQ(ISRestoreIndices(vsIS, &vsIdx));
649       CHKERRQ(ISDestroy(&vsIS));
650     }
651     /* --- Side Sets/Face Sets --- */
652     CHKERRQ(DMHasLabel(dm, "Face Sets", &hasLabel));
653     if (hasLabel) {
654       PetscInt        i, j, fs, fsSize;
655       const PetscInt *fsIdx, *faces;
656       IS              fsIS, stratumIS;
657       DMLabel         fsLabel;
658       PetscInt        numPoints, *points;
659       PetscInt        elem_list_size = 0;
660       PetscInt       *elem_list, *elem_ind, *side_list;
661 
662       CHKERRQ(DMGetLabel(dm, "Face Sets", &fsLabel));
663       /* Compute size of Node List and Element List */
664       CHKERRQ(DMLabelGetValueIS(fsLabel, &fsIS));
665       CHKERRQ(ISGetIndices(fsIS, &fsIdx));
666       for (fs=0; fs<num_fs; ++fs) {
667         CHKERRQ(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS));
668         CHKERRQ(ISGetSize(stratumIS, &fsSize));
669         elem_list_size += fsSize;
670         CHKERRQ(ISDestroy(&stratumIS));
671       }
672       ierr = PetscMalloc3(num_fs, &elem_ind,
673                           elem_list_size, &elem_list,
674                           elem_list_size, &side_list);CHKERRQ(ierr);
675       elem_ind[0] = 0;
676       for (fs=0; fs<num_fs; ++fs) {
677         CHKERRQ(DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS));
678         CHKERRQ(ISGetIndices(stratumIS, &faces));
679         CHKERRQ(ISGetSize(stratumIS, &fsSize));
680         /* Set Parameters */
681         PetscStackCallStandard(ex_put_set_param,exo->exoid, EX_SIDE_SET, fsIdx[fs], fsSize, 0);
682         /* Indices */
683         if (fs<num_fs-1) {
684           elem_ind[fs+1] = elem_ind[fs] + fsSize;
685         }
686 
687         for (i=0; i<fsSize; ++i) {
688           /* Element List */
689           points = NULL;
690           CHKERRQ(DMPlexGetTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points));
691           elem_list[elem_ind[fs] + i] = points[2] +1;
692           CHKERRQ(DMPlexRestoreTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points));
693 
694           /* Side List */
695           points = NULL;
696           CHKERRQ(DMPlexGetTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points));
697           for (j=1; j<numPoints; ++j) {
698             if (points[j*2]==faces[i]) {break;}
699           }
700           /* Convert HEX sides */
701           if (numPoints == 27) {
702             if      (j == 1) {j = 5;}
703             else if (j == 2) {j = 6;}
704             else if (j == 3) {j = 1;}
705             else if (j == 4) {j = 3;}
706             else if (j == 5) {j = 2;}
707             else if (j == 6) {j = 4;}
708           }
709           /* Convert TET sides */
710           if (numPoints == 15) {
711             --j;
712             if (j == 0) {j = 4;}
713           }
714           side_list[elem_ind[fs] + i] = j;
715           CHKERRQ(DMPlexRestoreTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points));
716 
717         }
718         CHKERRQ(ISRestoreIndices(stratumIS, &faces));
719         CHKERRQ(ISDestroy(&stratumIS));
720       }
721       CHKERRQ(ISRestoreIndices(fsIS, &fsIdx));
722       CHKERRQ(ISDestroy(&fsIS));
723 
724       /* Put side sets */
725       for (fs=0; fs<num_fs; ++fs) {
726         PetscStackCallStandard(ex_put_set,exo->exoid, EX_SIDE_SET, fsIdx[fs], &elem_list[elem_ind[fs]], &side_list[elem_ind[fs]]);
727       }
728       CHKERRQ(PetscFree3(elem_ind,elem_list,side_list));
729     }
730     /*
731       close the exodus file
732     */
733     ex_close(exo->exoid);
734     exo->exoid = -1;
735   }
736   CHKERRQ(PetscSectionDestroy(&coordSection));
737 
738   /*
739     reopen the file in parallel
740   */
741   EXO_mode = EX_WRITE;
742 #if defined(PETSC_USE_64BIT_INDICES)
743   EXO_mode += EX_ALL_INT64_API;
744 #endif
745   CPU_word_size = sizeof(PetscReal);
746   IO_word_size  = sizeof(PetscReal);
747   exo->exoid = ex_open_par(exo->filename,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,comm,MPI_INFO_NULL);
748   PetscCheckFalse(exo->exoid < 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", exo->filename);
749   PetscFunctionReturn(0);
750 }
751 
752 /*
  VecView_PlexExodusII_Internal - Write a Vec to an exodus file
754 
755   Collective on v
756 
757   Input Parameters:
758 + v  - The vector to be written
759 - viewer - The PetscViewerExodusII viewer associate to an exodus file
760 
761   Notes:
762   The exodus result variable index is obtained by comparing the Vec name and the
763   names of variables declared in the exodus file. For instance for a Vec named "V"
764   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
765   amongst all variables.
766   In the event where a nodal and zonal variable both match, the function will return an error instead of
767   possibly corrupting the file
768 
769   Level: beginner
770 
771 .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII()
772 @*/
773 PetscErrorCode VecView_PlexExodusII_Internal(Vec v, PetscViewer viewer)
774 {
775   DM                 dm;
776   MPI_Comm           comm;
777   PetscMPIInt        rank;
778 
779   int                exoid,offsetN = 0, offsetZ = 0;
780   const char        *vecname;
781   PetscInt           step;
782 
783   PetscFunctionBegin;
784   CHKERRQ(PetscObjectGetComm((PetscObject) v, &comm));
785   CHKERRMPI(MPI_Comm_rank(comm, &rank));
786   CHKERRQ(PetscViewerExodusIIGetId(viewer,&exoid));
787   CHKERRQ(VecGetDM(v, &dm));
788   CHKERRQ(PetscObjectGetName((PetscObject) v, &vecname));
789 
790   CHKERRQ(DMGetOutputSequenceNumber(dm,&step,NULL));
791   CHKERRQ(EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN));
792   CHKERRQ(EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ));
793   PetscCheckFalse(offsetN <= 0 && offsetZ <= 0,comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname);
794   if (offsetN > 0) {
795     CHKERRQ(VecViewPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN));
796   } else if (offsetZ > 0) {
797     CHKERRQ(VecViewPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ));
798   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname);
799   PetscFunctionReturn(0);
800 }
801 
802 /*
  VecLoad_PlexExodusII_Internal - Read a Vec from an exodus file
804 
805   Collective on v
806 
807   Input Parameters:
808 + v  - The vector to be written
809 - viewer - The PetscViewerExodusII viewer associate to an exodus file
810 
811   Notes:
812   The exodus result variable index is obtained by comparing the Vec name and the
813   names of variables declared in the exodus file. For instance for a Vec named "V"
814   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
815   amongst all variables.
816   In the event where a nodal and zonal variable both match, the function will return an error instead of
817   possibly corrupting the file
818 
819   Level: beginner
820 
821 .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII()
822 @*/
823 PetscErrorCode VecLoad_PlexExodusII_Internal(Vec v, PetscViewer viewer)
824 {
825   DM                 dm;
826   MPI_Comm           comm;
827   PetscMPIInt        rank;
828 
829   int                exoid,offsetN = 0, offsetZ = 0;
830   const char        *vecname;
831   PetscInt           step;
832 
833   PetscFunctionBegin;
834   CHKERRQ(PetscObjectGetComm((PetscObject) v, &comm));
835   CHKERRMPI(MPI_Comm_rank(comm, &rank));
836   CHKERRQ(PetscViewerExodusIIGetId(viewer,&exoid));
837   CHKERRQ(VecGetDM(v, &dm));
838   CHKERRQ(PetscObjectGetName((PetscObject) v, &vecname));
839 
840   CHKERRQ(DMGetOutputSequenceNumber(dm,&step,NULL));
841   CHKERRQ(EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN));
842   CHKERRQ(EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ));
843   PetscCheckFalse(offsetN <= 0 && offsetZ <= 0,comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. ", vecname);
844   if (offsetN > 0) {
845     CHKERRQ(VecLoadPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN));
846   } else if (offsetZ > 0) {
847     CHKERRQ(VecLoadPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ));
848   } else SETERRQ(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. ", vecname);
849   PetscFunctionReturn(0);
850 }
851 
852 /*
853   VecViewPlex_ExodusII_Nodal_Internal - Write a Vec corresponding to a nodal field to an exodus file
854 
855   Collective on v
856 
857   Input Parameters:
858 + v  - The vector to be written
859 . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
860 . step - the time step to write at (exodus steps are numbered starting from 1)
861 - offset - the location of the variable in the file
862 
863   Notes:
864   The exodus result nodal variable index is obtained by comparing the Vec name and the
865   names of nodal variables declared in the exodus file. For instance for a Vec named "V"
866   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
867   amongst all nodal variables.
868 
869   Level: beginner
870 
871 .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecLoadNodal_PlexEXO(),VecViewZonal_PlexEXO(),VecLoadZonal_PlexEXO()
872 @*/
873 PetscErrorCode VecViewPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
874 {
875   MPI_Comm           comm;
876   PetscMPIInt        size;
877   DM                 dm;
878   Vec                vNatural, vComp;
879   const PetscScalar *varray;
880   PetscInt           xs, xe, bs;
881   PetscBool          useNatural;
882 
883   PetscFunctionBegin;
884   CHKERRQ(PetscObjectGetComm((PetscObject) v, &comm));
885   CHKERRMPI(MPI_Comm_size(comm, &size));
886   CHKERRQ(VecGetDM(v, &dm));
887   CHKERRQ(DMGetUseNatural(dm, &useNatural));
888   useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
889   if (useNatural) {
890     CHKERRQ(DMGetGlobalVector(dm, &vNatural));
891     CHKERRQ(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
892     CHKERRQ(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
893   } else {
894     vNatural = v;
895   }
896 
897   /* Write local chunk of the result in the exodus file
898      exodus stores each component of a vector-valued field as a separate variable.
899      We assume that they are stored sequentially */
900   CHKERRQ(VecGetOwnershipRange(vNatural, &xs, &xe));
901   CHKERRQ(VecGetBlockSize(vNatural, &bs));
902   if (bs == 1) {
903     CHKERRQ(VecGetArrayRead(vNatural, &varray));
904     PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray);
905     CHKERRQ(VecRestoreArrayRead(vNatural, &varray));
906   } else {
907     IS       compIS;
908     PetscInt c;
909 
910     CHKERRQ(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
911     for (c = 0; c < bs; ++c) {
912       CHKERRQ(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
913       CHKERRQ(VecGetSubVector(vNatural, compIS, &vComp));
914       CHKERRQ(VecGetArrayRead(vComp, &varray));
915       PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray);
916       CHKERRQ(VecRestoreArrayRead(vComp, &varray));
917       CHKERRQ(VecRestoreSubVector(vNatural, compIS, &vComp));
918     }
919     CHKERRQ(ISDestroy(&compIS));
920   }
921   if (useNatural) CHKERRQ(DMRestoreGlobalVector(dm, &vNatural));
922   PetscFunctionReturn(0);
923 }
924 
925 /*
926   VecLoadPlex_ExodusII_Nodal_Internal - Read a Vec corresponding to a nodal field from an exodus file
927 
928   Collective on v
929 
930   Input Parameters:
931 + v  - The vector to be written
932 . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
933 . step - the time step to read at (exodus steps are numbered starting from 1)
934 - offset - the location of the variable in the file
935 
936   Notes:
937   The exodus result nodal variable index is obtained by comparing the Vec name and the
938   names of nodal variables declared in the exodus file. For instance for a Vec named "V"
939   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
940   amongst all nodal variables.
941 
942   Level: beginner
943 
944 .seealso: EXOGetVarIndex_Internal(), DMPlexView_ExodusII_Internal(), VecViewPlex_ExodusII_Nodal_Internal(), VecViewZonal_PlexEXO(), VecLoadZonal_PlexEXO()
945 */
946 PetscErrorCode VecLoadPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
947 {
948   MPI_Comm       comm;
949   PetscMPIInt    size;
950   DM             dm;
951   Vec            vNatural, vComp;
952   PetscScalar   *varray;
953   PetscInt       xs, xe, bs;
954   PetscBool      useNatural;
955 
956   PetscFunctionBegin;
957   CHKERRQ(PetscObjectGetComm((PetscObject) v, &comm));
958   CHKERRMPI(MPI_Comm_size(comm, &size));
959   CHKERRQ(VecGetDM(v,&dm));
960   CHKERRQ(DMGetUseNatural(dm, &useNatural));
961   useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
962   if (useNatural) CHKERRQ(DMGetGlobalVector(dm,&vNatural));
963   else            {vNatural = v;}
964 
965   /* Read local chunk from the file */
966   CHKERRQ(VecGetOwnershipRange(vNatural, &xs, &xe));
967   CHKERRQ(VecGetBlockSize(vNatural, &bs));
968   if (bs == 1) {
969     CHKERRQ(VecGetArray(vNatural, &varray));
970     PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray);
971     CHKERRQ(VecRestoreArray(vNatural, &varray));
972   } else {
973     IS       compIS;
974     PetscInt c;
975 
976     CHKERRQ(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
977     for (c = 0; c < bs; ++c) {
978       CHKERRQ(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
979       CHKERRQ(VecGetSubVector(vNatural, compIS, &vComp));
980       CHKERRQ(VecGetArray(vComp, &varray));
981       PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray);
982       CHKERRQ(VecRestoreArray(vComp, &varray));
983       CHKERRQ(VecRestoreSubVector(vNatural, compIS, &vComp));
984     }
985     CHKERRQ(ISDestroy(&compIS));
986   }
987   if (useNatural) {
988     CHKERRQ(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
989     CHKERRQ(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
990     CHKERRQ(DMRestoreGlobalVector(dm, &vNatural));
991   }
992   PetscFunctionReturn(0);
993 }
994 
995 /*
996   VecViewPlex_ExodusII_Zonal_Internal - Write a Vec corresponding to a zonal (cell based) field to an exodus file
997 
998   Collective on v
999 
1000   Input Parameters:
1001 + v  - The vector to be written
1002 . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1003 . step - the time step to write at (exodus steps are numbered starting from 1)
1004 - offset - the location of the variable in the file
1005 
1006   Notes:
1007   The exodus result zonal variable index is obtained by comparing the Vec name and the
1008   names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1009   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1010   amongst all zonal variables.
1011 
1012   Level: beginner
1013 
1014 .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Zonal_Internal()
1015 */
PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm          comm;
  PetscMPIInt       size;
  DM                dm;
  Vec               vNatural, vComp;
  const PetscScalar *varray;
  PetscInt          xs, xe, bs;
  PetscBool         useNatural;
  IS                compIS;
  PetscInt         *csSize, *csID;
  PetscInt          numCS, set, csxs = 0;   /* csxs: running start index of the current cell set in natural numbering */

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject)v, &comm));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  CHKERRQ(VecGetDM(v, &dm));
  CHKERRQ(DMGetUseNatural(dm, &useNatural));
  /* The natural (file) reordering is only performed in parallel */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  if (useNatural) {
    /* Permute the vector into the natural ordering before writing */
    CHKERRQ(DMGetGlobalVector(dm, &vNatural));
    CHKERRQ(DMPlexGlobalToNaturalBegin(dm, v, vNatural));
    CHKERRQ(DMPlexGlobalToNaturalEnd(dm, v, vNatural));
  } else {
    vNatural = v;
  }

  /* Write local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially
     Zonal variables are accessed one element block at a time, so we loop through the cell sets,
     but once the vector has been reordered to natural size, we cannot use the label information
     to figure out what to save where. */
  numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
  CHKERRQ(PetscMalloc2(numCS, &csID, numCS, &csSize));
  PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, csID);
  /* Query each element block's size so natural indices can be mapped to (block, offset-in-block) */
  for (set = 0; set < numCS; ++set) {
    ex_block block;

    block.id   = csID[set];
    block.type = EX_ELEM_BLOCK;
    PetscStackCallStandard(ex_get_block_param,exoid, &block);
    csSize[set] = block.num_entry;
  }
  CHKERRQ(VecGetOwnershipRange(vNatural, &xs, &xe));
  CHKERRQ(VecGetBlockSize(vNatural, &bs));
  /* compIS is reused across blocks/components; only created when there are multiple components */
  if (bs > 1) CHKERRQ(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
  for (set = 0; set < numCS; set++) {
    PetscInt csLocalSize, c;

    /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
       local slice of zonal values:         xs/bs,xm/bs-1
       intersection:                        max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
    csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
    if (bs == 1) {
      CHKERRQ(VecGetArrayRead(vNatural, &varray));
      /* ex_put_partial_var uses 1-based indices within the block, hence the +1 */
      PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]);
      CHKERRQ(VecRestoreArrayRead(vNatural, &varray));
    } else {
      /* Multi-component field: write one strided component at a time; exodus numbers
         components as consecutive variables starting at "offset" */
      for (c = 0; c < bs; ++c) {
        CHKERRQ(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
        CHKERRQ(VecGetSubVector(vNatural, compIS, &vComp));
        CHKERRQ(VecGetArrayRead(vComp, &varray));
        PetscStackCallStandard(ex_put_partial_var,exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]);
        CHKERRQ(VecRestoreArrayRead(vComp, &varray));
        CHKERRQ(VecRestoreSubVector(vNatural, compIS, &vComp));
      }
    }
    csxs += csSize[set];
  }
  CHKERRQ(PetscFree2(csID, csSize));
  if (bs > 1) CHKERRQ(ISDestroy(&compIS));
  if (useNatural) CHKERRQ(DMRestoreGlobalVector(dm,&vNatural));
  PetscFunctionReturn(0);
}
1091 
1092 /*
1093   VecLoadPlex_ExodusII_Zonal_Internal - Read a Vec corresponding to a zonal (cell based) field from an exodus file
1094 
1095   Collective on v
1096 
1097   Input Parameters:
1098 + v  - The vector to be written
1099 . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1100 . step - the time step to read at (exodus steps are numbered starting from 1)
1101 - offset - the location of the variable in the file
1102 
1103   Notes:
1104   The exodus result zonal variable index is obtained by comparing the Vec name and the
1105   names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1106   the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1107   amongst all zonal variables.
1108 
1109   Level: beginner
1110 
1111 .seealso: EXOGetVarIndex_Internal(), DMPlexView_ExodusII_Internal(), VecViewPlex_ExodusII_Nodal_Internal(), VecLoadPlex_ExodusII_Nodal_Internal(), VecLoadPlex_ExodusII_Zonal_Internal()
1112 */
PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
{
  MPI_Comm          comm;
  PetscMPIInt       size;
  DM                dm;
  Vec               vNatural, vComp;
  PetscScalar      *varray;
  PetscInt          xs, xe, bs;
  PetscBool         useNatural;
  IS                compIS;
  PetscInt         *csSize, *csID;
  PetscInt          numCS, set, csxs = 0;   /* csxs: running start index of the current cell set in natural numbering */

  PetscFunctionBegin;
  CHKERRQ(PetscObjectGetComm((PetscObject)v,&comm));
  CHKERRMPI(MPI_Comm_size(comm, &size));
  CHKERRQ(VecGetDM(v, &dm));
  CHKERRQ(DMGetUseNatural(dm, &useNatural));
  /* The natural (file) reordering is only performed in parallel */
  useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
  /* Read into a scratch vector in natural ordering, then scatter back at the end */
  if (useNatural) CHKERRQ(DMGetGlobalVector(dm,&vNatural));
  else            {vNatural = v;}

  /* Read local chunk of the result in the exodus file
     exodus stores each component of a vector-valued field as a separate variable.
     We assume that they are stored sequentially
     Zonal variables are accessed one element block at a time, so we loop through the cell sets,
     but once the vector has been reordered to natural size, we cannot use the label information
     to figure out what to save where. */
  numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
  CHKERRQ(PetscMalloc2(numCS, &csID, numCS, &csSize));
  PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, csID);
  /* Query each element block's size so natural indices can be mapped to (block, offset-in-block) */
  for (set = 0; set < numCS; ++set) {
    ex_block block;

    block.id   = csID[set];
    block.type = EX_ELEM_BLOCK;
    PetscStackCallStandard(ex_get_block_param,exoid, &block);
    csSize[set] = block.num_entry;
  }
  CHKERRQ(VecGetOwnershipRange(vNatural, &xs, &xe));
  CHKERRQ(VecGetBlockSize(vNatural, &bs));
  /* compIS is reused across blocks/components; only created when there are multiple components */
  if (bs > 1) CHKERRQ(ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS));
  for (set = 0; set < numCS; ++set) {
    PetscInt csLocalSize, c;

    /* range of indices for set setID[set]: csxs:csxs + csSize[set]-1
       local slice of zonal values:         xs/bs,xm/bs-1
       intersection:                        max(xs/bs,csxs),min(xm/bs-1,csxs + csSize[set]-1) */
    csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
    if (bs == 1) {
      CHKERRQ(VecGetArray(vNatural, &varray));
      /* ex_get_partial_var uses 1-based indices within the block, hence the +1 */
      PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]);
      CHKERRQ(VecRestoreArray(vNatural, &varray));
    } else {
      /* Multi-component field: read one strided component at a time; exodus numbers
         components as consecutive variables starting at "offset" */
      for (c = 0; c < bs; ++c) {
        CHKERRQ(ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs));
        CHKERRQ(VecGetSubVector(vNatural, compIS, &vComp));
        CHKERRQ(VecGetArray(vComp, &varray));
        PetscStackCallStandard(ex_get_partial_var,exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]);
        CHKERRQ(VecRestoreArray(vComp, &varray));
        CHKERRQ(VecRestoreSubVector(vNatural, compIS, &vComp));
      }
    }
    csxs += csSize[set];
  }
  CHKERRQ(PetscFree2(csID, csSize));
  if (bs > 1) CHKERRQ(ISDestroy(&compIS));
  if (useNatural) {
    /* Scatter from natural ordering back into the DM's global ordering */
    CHKERRQ(DMPlexNaturalToGlobalBegin(dm, vNatural, v));
    CHKERRQ(DMPlexNaturalToGlobalEnd(dm, vNatural, v));
    CHKERRQ(DMRestoreGlobalVector(dm, &vNatural));
  }
  PetscFunctionReturn(0);
}
1187 #endif
1188 
1189 /*@
1190   PetscViewerExodusIIGetId - Get the file id of the ExodusII file
1191 
1192   Logically Collective on PetscViewer
1193 
1194   Input Parameter:
1195 .  viewer - the PetscViewer
1196 
1197   Output Parameter:
1198 .  exoid - The ExodusII file id
1199 
1200   Level: intermediate
1201 
1202 .seealso: PetscViewerFileSetMode(), PetscViewerCreate(), PetscViewerSetType(), PetscViewerBinaryOpen()
1203 @*/
PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Forward to the "PetscViewerGetId_C" method registered by the ExodusII viewer;
     PetscTryMethod makes this a no-op for viewer types that do not provide it */
  CHKERRQ(PetscTryMethod(viewer, "PetscViewerGetId_C",(PetscViewer,int*),(viewer,exoid)));
  PetscFunctionReturn(0);
}
1211 
1212 /*@
1213    PetscViewerExodusIISetOrder - Set the elements order in the exodusII file.
1214 
1215    Collective
1216 
1217    Input Parameters:
1218 +  viewer - the viewer
1219 -  order - elements order
1220 
   Level: beginner

1227 .seealso: PetscViewerExodusIIGetId(), PetscViewerExodusIIGetOrder(), PetscViewerExodusIISetOrder()
1228 @*/
PetscErrorCode PetscViewerExodusIISetOrder(PetscViewer viewer, PetscInt order)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Forward to the "PetscViewerSetOrder_C" method registered by the ExodusII viewer;
     PetscTryMethod makes this a no-op for viewer types that do not provide it */
  CHKERRQ(PetscTryMethod(viewer, "PetscViewerSetOrder_C",(PetscViewer,PetscInt),(viewer,order)));
  PetscFunctionReturn(0);
}
1236 
1237 /*@
1238    PetscViewerExodusIIGetOrder - Get the elements order in the exodusII file.
1239 
1240    Collective
1241 
   Input Parameter:
.  viewer - the viewer

   Output Parameter:
.  order - elements order

   Level: beginner

1252 .seealso: PetscViewerExodusIIGetId(), PetscViewerExodusIIGetOrder(), PetscViewerExodusIISetOrder()
1253 @*/
PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 1);
  /* Forward to the "PetscViewerGetOrder_C" method registered by the ExodusII viewer;
     PetscTryMethod makes this a no-op for viewer types that do not provide it */
  CHKERRQ(PetscTryMethod(viewer, "PetscViewerGetOrder_C",(PetscViewer,PetscInt*),(viewer,order)));
  PetscFunctionReturn(0);
}
1261 
1262 /*@C
1263    PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.
1264 
1265    Collective
1266 
1267    Input Parameters:
1268 +  comm - MPI communicator
1269 .  name - name of file
1270 -  type - type of file
1271 $    FILE_MODE_WRITE - create new file for binary output
1272 $    FILE_MODE_READ - open existing file for binary input
1273 $    FILE_MODE_APPEND - open existing file for binary output
1274 
1275    Output Parameter:
1276 .  exo - PetscViewer for Exodus II input/output to use with the specified file
1277 
1278    Level: beginner
1279 
1280    Note:
1281    This PetscViewer should be destroyed with PetscViewerDestroy().
1282 
1283 .seealso: PetscViewerPushFormat(), PetscViewerDestroy(),
1284           DMLoad(), PetscFileMode, PetscViewer, PetscViewerSetType(), PetscViewerFileSetMode(), PetscViewerFileSetName()
1285 @*/
PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
{
  PetscFunctionBegin;
  /* Convenience wrapper: create an ExodusII viewer, set its file mode and name,
     then apply any command-line viewer options */
  CHKERRQ(PetscViewerCreate(comm, exo));
  CHKERRQ(PetscViewerSetType(*exo, PETSCVIEWEREXODUSII));
  CHKERRQ(PetscViewerFileSetMode(*exo, type));
  CHKERRQ(PetscViewerFileSetName(*exo, name));
  CHKERRQ(PetscViewerSetFromOptions(*exo));
  PetscFunctionReturn(0);
}
1296 
1297 /*@C
1298   DMPlexCreateExodusFromFile - Create a DMPlex mesh from an ExodusII file.
1299 
1300   Collective
1301 
1302   Input Parameters:
1303 + comm  - The MPI communicator
1304 . filename - The name of the ExodusII file
1305 - interpolate - Create faces and edges in the mesh
1306 
1307   Output Parameter:
1308 . dm  - The DM object representing the mesh
1309 
1310   Level: beginner
1311 
1312 .seealso: DMPLEX, DMCreate(), DMPlexCreateExodus()
1313 @*/
PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm)
{
  PetscMPIInt    rank;
#if defined(PETSC_HAVE_EXODUSII)
  /* Word sizes as required by ex_open(); IO_word_size = 0 means "use whatever precision the file was written with" */
  int   CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1;
  float version;
#endif

  PetscFunctionBegin;
  PetscValidCharPointer(filename, 2);
  CHKERRMPI(MPI_Comm_rank(comm, &rank));
#if defined(PETSC_HAVE_EXODUSII)
  /* Only rank 0 opens and reads the file; exoid stays -1 on the other ranks,
     and DMPlexCreateExodus() distributes the data it reads on rank 0 */
  if (rank == 0) {
    exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version);
    PetscCheckFalse(exoid <= 0,PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", filename);
  }
  CHKERRQ(DMPlexCreateExodus(comm, exoid, interpolate, dm));
  if (rank == 0) {PetscStackCallStandard(ex_close,exoid);}
  PetscFunctionReturn(0);
#else
  SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
#endif
}
1337 
1338 #if defined(PETSC_HAVE_EXODUSII)
1339 static PetscErrorCode ExodusGetCellType_Internal(const char *elem_type, DMPolytopeType *ct)
1340 {
1341   PetscBool      flg;
1342 
1343   PetscFunctionBegin;
1344   *ct = DM_POLYTOPE_UNKNOWN;
1345   CHKERRQ(PetscStrcmp(elem_type, "TRI", &flg));
1346   if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;}
1347   CHKERRQ(PetscStrcmp(elem_type, "TRI3", &flg));
1348   if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;}
1349   CHKERRQ(PetscStrcmp(elem_type, "QUAD", &flg));
1350   if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;}
1351   CHKERRQ(PetscStrcmp(elem_type, "QUAD4", &flg));
1352   if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;}
1353   CHKERRQ(PetscStrcmp(elem_type, "SHELL4", &flg));
1354   if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;}
1355   CHKERRQ(PetscStrcmp(elem_type, "TETRA", &flg));
1356   if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;}
1357   CHKERRQ(PetscStrcmp(elem_type, "TET4", &flg));
1358   if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;}
1359   CHKERRQ(PetscStrcmp(elem_type, "WEDGE", &flg));
1360   if (flg) {*ct = DM_POLYTOPE_TRI_PRISM; goto done;}
1361   CHKERRQ(PetscStrcmp(elem_type, "HEX", &flg));
1362   if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;}
1363   CHKERRQ(PetscStrcmp(elem_type, "HEX8", &flg));
1364   if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;}
1365   CHKERRQ(PetscStrcmp(elem_type, "HEXAHEDRON", &flg));
1366   if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;}
1367   SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unrecognized element type %s", elem_type);
1368   done:
1369   PetscFunctionReturn(0);
1370 }
1371 #endif
1372 
1373 /*@
1374   DMPlexCreateExodus - Create a DMPlex mesh from an ExodusII file ID.
1375 
1376   Collective
1377 
1378   Input Parameters:
1379 + comm  - The MPI communicator
1380 . exoid - The ExodusII id associated with a exodus file and obtained using ex_open
1381 - interpolate - Create faces and edges in the mesh
1382 
1383   Output Parameter:
1384 . dm  - The DM object representing the mesh
1385 
1386   Level: beginner
1387 
1388 .seealso: DMPLEX, DMCreate()
1389 @*/
1390 PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool interpolate, DM *dm)
1391 {
1392 #if defined(PETSC_HAVE_EXODUSII)
1393   PetscMPIInt    num_proc, rank;
1394   DMLabel        cellSets = NULL, faceSets = NULL, vertSets = NULL;
1395   PetscSection   coordSection;
1396   Vec            coordinates;
1397   PetscScalar    *coords;
1398   PetscInt       coordSize, v;
1399   /* Read from ex_get_init() */
1400   char title[PETSC_MAX_PATH_LEN+1];
1401   int  dim    = 0, dimEmbed = 0, numVertices = 0, numCells = 0;
1402   int  num_cs = 0, num_vs = 0, num_fs = 0;
1403 #endif
1404 
1405   PetscFunctionBegin;
1406 #if defined(PETSC_HAVE_EXODUSII)
1407   CHKERRMPI(MPI_Comm_rank(comm, &rank));
1408   CHKERRMPI(MPI_Comm_size(comm, &num_proc));
1409   CHKERRQ(DMCreate(comm, dm));
1410   CHKERRQ(DMSetType(*dm, DMPLEX));
1411   /* Open EXODUS II file and read basic information on rank 0, then broadcast to all processors */
1412   if (rank == 0) {
1413     CHKERRQ(PetscMemzero(title,PETSC_MAX_PATH_LEN+1));
1414     PetscStackCallStandard(ex_get_init,exoid, title, &dimEmbed, &numVertices, &numCells, &num_cs, &num_vs, &num_fs);
1415     PetscCheckFalse(!num_cs,PETSC_COMM_SELF,PETSC_ERR_SUP,"Exodus file does not contain any cell set");
1416   }
1417   CHKERRMPI(MPI_Bcast(title, PETSC_MAX_PATH_LEN+1, MPI_CHAR, 0, comm));
1418   CHKERRMPI(MPI_Bcast(&dim, 1, MPI_INT, 0, comm));
1419   CHKERRQ(PetscObjectSetName((PetscObject) *dm, title));
1420   CHKERRQ(DMPlexSetChart(*dm, 0, numCells+numVertices));
1421   /*   We do not want this label automatically computed, instead we compute it here */
1422   CHKERRQ(DMCreateLabel(*dm, "celltype"));
1423 
1424   /* Read cell sets information */
1425   if (rank == 0) {
1426     PetscInt *cone;
1427     int      c, cs, ncs, c_loc, v, v_loc;
1428     /* Read from ex_get_elem_blk_ids() */
1429     int *cs_id, *cs_order;
1430     /* Read from ex_get_elem_block() */
1431     char buffer[PETSC_MAX_PATH_LEN+1];
1432     int  num_cell_in_set, num_vertex_per_cell, num_hybrid, num_attr;
1433     /* Read from ex_get_elem_conn() */
1434     int *cs_connect;
1435 
1436     /* Get cell sets IDs */
1437     CHKERRQ(PetscMalloc2(num_cs, &cs_id, num_cs, &cs_order));
1438     PetscStackCallStandard(ex_get_ids,exoid, EX_ELEM_BLOCK, cs_id);
1439     /* Read the cell set connectivity table and build mesh topology
1440        EXO standard requires that cells in cell sets be numbered sequentially and be pairwise disjoint. */
1441     /* Check for a hybrid mesh */
1442     for (cs = 0, num_hybrid = 0; cs < num_cs; ++cs) {
1443       DMPolytopeType ct;
1444       char           elem_type[PETSC_MAX_PATH_LEN];
1445 
1446       CHKERRQ(PetscArrayzero(elem_type, sizeof(elem_type)));
1447       PetscStackCallStandard(ex_get_elem_type,exoid, cs_id[cs], elem_type);
1448       CHKERRQ(ExodusGetCellType_Internal(elem_type, &ct));
1449       dim  = PetscMax(dim, DMPolytopeTypeGetDim(ct));
1450       PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr);
1451       switch (ct) {
1452         case DM_POLYTOPE_TRI_PRISM:
1453           cs_order[cs] = cs;
1454           ++num_hybrid;
1455           break;
1456         default:
1457           for (c = cs; c > cs-num_hybrid; --c) cs_order[c] = cs_order[c-1];
1458           cs_order[cs-num_hybrid] = cs;
1459       }
1460     }
1461     /* First set sizes */
1462     for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1463       DMPolytopeType ct;
1464       char           elem_type[PETSC_MAX_PATH_LEN];
1465       const PetscInt cs = cs_order[ncs];
1466 
1467       CHKERRQ(PetscArrayzero(elem_type, sizeof(elem_type)));
1468       PetscStackCallStandard(ex_get_elem_type,exoid, cs_id[cs], elem_type);
1469       CHKERRQ(ExodusGetCellType_Internal(elem_type, &ct));
1470       PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr);
1471       for (c_loc = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1472         CHKERRQ(DMPlexSetConeSize(*dm, c, num_vertex_per_cell));
1473         CHKERRQ(DMPlexSetCellType(*dm, c, ct));
1474       }
1475     }
1476     for (v = numCells; v < numCells+numVertices; ++v) CHKERRQ(DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT));
1477     CHKERRQ(DMSetUp(*dm));
1478     for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1479       const PetscInt cs = cs_order[ncs];
1480       PetscStackCallStandard(ex_get_block,exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr);
1481       CHKERRQ(PetscMalloc2(num_vertex_per_cell*num_cell_in_set,&cs_connect,num_vertex_per_cell,&cone));
1482       PetscStackCallStandard(ex_get_conn,exoid, EX_ELEM_BLOCK, cs_id[cs], cs_connect,NULL,NULL);
1483       /* EXO uses Fortran-based indexing, DMPlex uses C-style and numbers cell first then vertices. */
1484       for (c_loc = 0, v = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1485         DMPolytopeType ct;
1486 
1487         for (v_loc = 0; v_loc < num_vertex_per_cell; ++v_loc, ++v) {
1488           cone[v_loc] = cs_connect[v]+numCells-1;
1489         }
1490         CHKERRQ(DMPlexGetCellType(*dm, c, &ct));
1491         CHKERRQ(DMPlexInvertCell(ct, cone));
1492         CHKERRQ(DMPlexSetCone(*dm, c, cone));
1493         CHKERRQ(DMSetLabelValue_Fast(*dm, &cellSets, "Cell Sets", c, cs_id[cs]));
1494       }
1495       CHKERRQ(PetscFree2(cs_connect,cone));
1496     }
1497     CHKERRQ(PetscFree2(cs_id, cs_order));
1498   }
1499   {
1500     PetscInt ints[] = {dim, dimEmbed};
1501 
1502     CHKERRMPI(MPI_Bcast(ints, 2, MPIU_INT, 0, comm));
1503     CHKERRQ(DMSetDimension(*dm, ints[0]));
1504     CHKERRQ(DMSetCoordinateDim(*dm, ints[1]));
1505     dim      = ints[0];
1506     dimEmbed = ints[1];
1507   }
1508   CHKERRQ(DMPlexSymmetrize(*dm));
1509   CHKERRQ(DMPlexStratify(*dm));
1510   if (interpolate) {
1511     DM idm;
1512 
1513     CHKERRQ(DMPlexInterpolate(*dm, &idm));
1514     CHKERRQ(DMDestroy(dm));
1515     *dm  = idm;
1516   }
1517 
1518   /* Create vertex set label */
1519   if (rank == 0 && (num_vs > 0)) {
1520     int vs, v;
1521     /* Read from ex_get_node_set_ids() */
1522     int *vs_id;
1523     /* Read from ex_get_node_set_param() */
1524     int num_vertex_in_set;
1525     /* Read from ex_get_node_set() */
1526     int *vs_vertex_list;
1527 
1528     /* Get vertex set ids */
1529     CHKERRQ(PetscMalloc1(num_vs, &vs_id));
1530     PetscStackCallStandard(ex_get_ids,exoid, EX_NODE_SET, vs_id);
1531     for (vs = 0; vs < num_vs; ++vs) {
1532       PetscStackCallStandard(ex_get_set_param,exoid, EX_NODE_SET, vs_id[vs], &num_vertex_in_set, NULL);
1533       CHKERRQ(PetscMalloc1(num_vertex_in_set, &vs_vertex_list));
1534       PetscStackCallStandard(ex_get_set,exoid, EX_NODE_SET, vs_id[vs], vs_vertex_list, NULL);
1535       for (v = 0; v < num_vertex_in_set; ++v) {
1536         CHKERRQ(DMSetLabelValue_Fast(*dm, &vertSets, "Vertex Sets", vs_vertex_list[v]+numCells-1, vs_id[vs]));
1537       }
1538       CHKERRQ(PetscFree(vs_vertex_list));
1539     }
1540     CHKERRQ(PetscFree(vs_id));
1541   }
1542   /* Read coordinates */
1543   CHKERRQ(DMGetCoordinateSection(*dm, &coordSection));
1544   CHKERRQ(PetscSectionSetNumFields(coordSection, 1));
1545   CHKERRQ(PetscSectionSetFieldComponents(coordSection, 0, dimEmbed));
1546   CHKERRQ(PetscSectionSetChart(coordSection, numCells, numCells + numVertices));
1547   for (v = numCells; v < numCells+numVertices; ++v) {
1548     CHKERRQ(PetscSectionSetDof(coordSection, v, dimEmbed));
1549     CHKERRQ(PetscSectionSetFieldDof(coordSection, v, 0, dimEmbed));
1550   }
1551   CHKERRQ(PetscSectionSetUp(coordSection));
1552   CHKERRQ(PetscSectionGetStorageSize(coordSection, &coordSize));
1553   CHKERRQ(VecCreate(PETSC_COMM_SELF, &coordinates));
1554   CHKERRQ(PetscObjectSetName((PetscObject) coordinates, "coordinates"));
1555   CHKERRQ(VecSetSizes(coordinates, coordSize, PETSC_DETERMINE));
1556   CHKERRQ(VecSetBlockSize(coordinates, dimEmbed));
1557   CHKERRQ(VecSetType(coordinates,VECSTANDARD));
1558   CHKERRQ(VecGetArray(coordinates, &coords));
1559   if (rank == 0) {
1560     PetscReal *x, *y, *z;
1561 
1562     CHKERRQ(PetscMalloc3(numVertices,&x,numVertices,&y,numVertices,&z));
1563     PetscStackCallStandard(ex_get_coord,exoid, x, y, z);
1564     if (dimEmbed > 0) {
1565       for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+0] = x[v];
1566     }
1567     if (dimEmbed > 1) {
1568       for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+1] = y[v];
1569     }
1570     if (dimEmbed > 2) {
1571       for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+2] = z[v];
1572     }
1573     CHKERRQ(PetscFree3(x,y,z));
1574   }
1575   CHKERRQ(VecRestoreArray(coordinates, &coords));
1576   CHKERRQ(DMSetCoordinatesLocal(*dm, coordinates));
1577   CHKERRQ(VecDestroy(&coordinates));
1578 
1579   /* Create side set label */
1580   if (rank == 0 && interpolate && (num_fs > 0)) {
1581     int fs, f, voff;
1582     /* Read from ex_get_side_set_ids() */
1583     int *fs_id;
1584     /* Read from ex_get_side_set_param() */
1585     int num_side_in_set;
1586     /* Read from ex_get_side_set_node_list() */
1587     int *fs_vertex_count_list, *fs_vertex_list;
1588     /* Read side set labels */
1589     char fs_name[MAX_STR_LENGTH+1];
1590 
1591     /* Get side set ids */
1592     CHKERRQ(PetscMalloc1(num_fs, &fs_id));
1593     PetscStackCallStandard(ex_get_ids,exoid, EX_SIDE_SET, fs_id);
1594     for (fs = 0; fs < num_fs; ++fs) {
1595       PetscStackCallStandard(ex_get_set_param,exoid, EX_SIDE_SET, fs_id[fs], &num_side_in_set, NULL);
1596       CHKERRQ(PetscMalloc2(num_side_in_set,&fs_vertex_count_list,num_side_in_set*4,&fs_vertex_list));
1597       PetscStackCallStandard(ex_get_side_set_node_list,exoid, fs_id[fs], fs_vertex_count_list, fs_vertex_list);
1598       /* Get the specific name associated with this side set ID. */
1599       int fs_name_err = ex_get_name(exoid, EX_SIDE_SET, fs_id[fs], fs_name);
1600       for (f = 0, voff = 0; f < num_side_in_set; ++f) {
1601         const PetscInt *faces   = NULL;
1602         PetscInt       faceSize = fs_vertex_count_list[f], numFaces;
1603         PetscInt       faceVertices[4], v;
1604 
1605         PetscCheckFalse(faceSize > 4,comm, PETSC_ERR_ARG_WRONG, "ExodusII side cannot have %d > 4 vertices", faceSize);
1606         for (v = 0; v < faceSize; ++v, ++voff) {
1607           faceVertices[v] = fs_vertex_list[voff]+numCells-1;
1608         }
1609         CHKERRQ(DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
1610         PetscCheckFalse(numFaces != 1,comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %d faces", f, fs, numFaces);
1611         CHKERRQ(DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs]));
1612         /* Only add the label if one has been detected for this side set. */
1613         if (!fs_name_err) {
1614           CHKERRQ(DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs]));
1615         }
1616         CHKERRQ(DMPlexRestoreJoin(*dm, faceSize, faceVertices, &numFaces, &faces));
1617       }
1618       CHKERRQ(PetscFree2(fs_vertex_count_list,fs_vertex_list));
1619     }
1620     CHKERRQ(PetscFree(fs_id));
1621   }
1622 
1623   { /* Create Cell/Face/Vertex Sets labels at all processes */
1624     enum {n = 3};
1625     PetscBool flag[n];
1626 
1627     flag[0] = cellSets ? PETSC_TRUE : PETSC_FALSE;
1628     flag[1] = faceSets ? PETSC_TRUE : PETSC_FALSE;
1629     flag[2] = vertSets ? PETSC_TRUE : PETSC_FALSE;
1630     CHKERRMPI(MPI_Bcast(flag, n, MPIU_BOOL, 0, comm));
1631     if (flag[0]) CHKERRQ(DMCreateLabel(*dm, "Cell Sets"));
1632     if (flag[1]) CHKERRQ(DMCreateLabel(*dm, "Face Sets"));
1633     if (flag[2]) CHKERRQ(DMCreateLabel(*dm, "Vertex Sets"));
1634   }
1635   PetscFunctionReturn(0);
1636 #else
1637   SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1638 #endif
1639 }
1640