xref: /petsc/src/dm/impls/plex/tests/ex26.c (revision a69119a591a03a9d906b29c0a4e9802e4d7c9795)
/* Help text printed by -help: this example exercises FEM data layout with DMPlex and ExodusII storage */
static char help[] = "Test FEM layout with DM and ExodusII storage\n\n";
2 
3 /*
4   In order to see the vectors which are being tested, use
5 
6      -ua_vec_view -s_vec_view
7 */
8 
9 #include <petsc.h>
10 #include <exodusII.h>
11 
12 #include <petsc/private/dmpleximpl.h>
13 
14 int main(int argc, char **argv) {
15   DM              dm, pdm, dmU, dmA, dmS, dmUA, dmUA2, *dmList;
16   Vec             X, U, A, S, UA, UA2;
17   IS              isU, isA, isS, isUA;
18   PetscSection    section;
19   const PetscInt  fieldU     = 0;
20   const PetscInt  fieldA     = 2;
21   const PetscInt  fieldS     = 1;
22   const PetscInt  fieldUA[2] = {0, 2};
23   char            ifilename[PETSC_MAX_PATH_LEN], ofilename[PETSC_MAX_PATH_LEN];
24   int             exoid = -1;
25   IS              csIS;
26   const PetscInt *csID;
27   PetscInt       *pStartDepth, *pEndDepth;
28   PetscInt        order = 1;
29   PetscInt        sdim, d, pStart, pEnd, p, numCS, set;
30   PetscMPIInt     rank, size;
31   PetscViewer     viewer;
32 
33   PetscFunctionBeginUser;
34   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
35   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
36   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
37   PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "FEM Layout Options", "ex26");
38   PetscCall(PetscOptionsString("-i", "Filename to read", "ex26", ifilename, ifilename, sizeof(ifilename), NULL));
39   PetscCall(PetscOptionsString("-o", "Filename to write", "ex26", ofilename, ofilename, sizeof(ofilename), NULL));
40   PetscCall(PetscOptionsBoundedInt("-order", "FEM polynomial order", "ex26", order, &order, NULL, 1));
41   PetscOptionsEnd();
42   PetscCheck((order >= 1) && (order <= 2), PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Unsupported polynomial order %" PetscInt_FMT " not in [1, 2]", order);
43 
44   /* Read the mesh from a file in any supported format */
45   PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, ifilename, NULL, PETSC_TRUE, &dm));
46   PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
47   PetscCall(DMSetFromOptions(dm));
48   PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
49   PetscCall(DMGetDimension(dm, &sdim));
50 
51   /* Create the exodus result file */
52   {
53     PetscInt numstep = 3, step;
54     char    *nodalVarName[4];
55     char    *zonalVarName[6];
56     int     *truthtable;
57     PetscInt numNodalVar, numZonalVar, i;
58 
59     /* enable exodus debugging information */
60     ex_opts(EX_VERBOSE | EX_DEBUG);
61     /* Create the exodus file */
62     PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD, ofilename, FILE_MODE_WRITE, &viewer));
63     /* The long way would be */
64     /*
65       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD,&viewer));
66       PetscCall(PetscViewerSetType(viewer,PETSCVIEWEREXODUSII));
67       PetscCall(PetscViewerFileSetMode(viewer,FILE_MODE_APPEND));
68       PetscCall(PetscViewerFileSetName(viewer,ofilename));
69     */
70     /* set the mesh order */
71     PetscCall(PetscViewerExodusIISetOrder(viewer, order));
72     PetscCall(PetscViewerView(viewer, PETSC_VIEWER_STDOUT_WORLD));
73     /*
74       Notice how the exodus file is actually NOT open at this point (exoid is -1)
75       Since we are overwritting the file (mode is FILE_MODE_WRITE), we are going to have to
76       write the geometry (the DM), which can only be done on a brand new file.
77     */
78 
79     /* Save the geometry to the file, erasing all previous content */
80     PetscCall(DMView(dm, viewer));
81     PetscCall(PetscViewerView(viewer, PETSC_VIEWER_STDOUT_WORLD));
82     /*
83       Note how the exodus file is now open
84     */
85 
86     /* "Format" the exodus result file, i.e. allocate space for nodal and zonal variables */
87     switch (sdim) {
88     case 2:
89       numNodalVar     = 3;
90       nodalVarName[0] = (char *)"U_x";
91       nodalVarName[1] = (char *)"U_y";
92       nodalVarName[2] = (char *)"Alpha";
93       numZonalVar     = 3;
94       zonalVarName[0] = (char *)"Sigma_11";
95       zonalVarName[1] = (char *)"Sigma_22";
96       zonalVarName[2] = (char *)"Sigma_12";
97       break;
98     case 3:
99       numNodalVar     = 4;
100       nodalVarName[0] = (char *)"U_x";
101       nodalVarName[1] = (char *)"U_y";
102       nodalVarName[2] = (char *)"U_z";
103       nodalVarName[3] = (char *)"Alpha";
104       numZonalVar     = 6;
105       zonalVarName[0] = (char *)"Sigma_11";
106       zonalVarName[1] = (char *)"Sigma_22";
107       zonalVarName[2] = (char *)"Sigma_33";
108       zonalVarName[3] = (char *)"Sigma_23";
109       zonalVarName[4] = (char *)"Sigma_13";
110       zonalVarName[5] = (char *)"Sigma_12";
111       break;
112     default: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No layout for dimension %" PetscInt_FMT, sdim);
113     }
114     PetscCall(PetscViewerExodusIIGetId(viewer, &exoid));
115     PetscCallExternal(ex_put_variable_param, exoid, EX_ELEM_BLOCK, numZonalVar);
116     PetscCallExternal(ex_put_variable_names, exoid, EX_ELEM_BLOCK, numZonalVar, zonalVarName);
117     PetscCallExternal(ex_put_variable_param, exoid, EX_NODAL, numNodalVar);
118     PetscCallExternal(ex_put_variable_names, exoid, EX_NODAL, numNodalVar, nodalVarName);
119     numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
120 
121     /*
122       An exodusII truth table specifies which fields are saved at which time step
123       It speeds up I/O but reserving space for fieldsin the file ahead of time.
124     */
125     PetscCall(PetscMalloc1(numZonalVar * numCS, &truthtable));
126     for (i = 0; i < numZonalVar * numCS; ++i) truthtable[i] = 1;
127     PetscCallExternal(ex_put_truth_table, exoid, EX_ELEM_BLOCK, numCS, numZonalVar, truthtable);
128     PetscCall(PetscFree(truthtable));
129 
130     /* Writing time step information in the file. Note that this is currently broken in the exodus library for netcdf4 (HDF5-based) files */
131     for (step = 0; step < numstep; ++step) {
132       PetscReal time = step;
133       PetscCallExternal(ex_put_time, exoid, step + 1, &time);
134     }
135   }
136 
137   /* Create the main section containing all fields */
138   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject)dm), &section));
139   PetscCall(PetscSectionSetNumFields(section, 3));
140   PetscCall(PetscSectionSetFieldName(section, fieldU, "U"));
141   PetscCall(PetscSectionSetFieldName(section, fieldA, "Alpha"));
142   PetscCall(PetscSectionSetFieldName(section, fieldS, "Sigma"));
143   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
144   PetscCall(PetscSectionSetChart(section, pStart, pEnd));
145   PetscCall(PetscMalloc2(sdim + 1, &pStartDepth, sdim + 1, &pEndDepth));
146   for (d = 0; d <= sdim; ++d) PetscCall(DMPlexGetDepthStratum(dm, d, &pStartDepth[d], &pEndDepth[d]));
147   /* Vector field U, Scalar field Alpha, Tensor field Sigma */
148   PetscCall(PetscSectionSetFieldComponents(section, fieldU, sdim));
149   PetscCall(PetscSectionSetFieldComponents(section, fieldA, 1));
150   PetscCall(PetscSectionSetFieldComponents(section, fieldS, sdim * (sdim + 1) / 2));
151 
152   /* Going through cell sets then cells, and setting up storage for the sections */
153   PetscCall(DMGetLabelSize(dm, "Cell Sets", &numCS));
154   PetscCall(DMGetLabelIdIS(dm, "Cell Sets", &csIS));
155   if (csIS) PetscCall(ISGetIndices(csIS, &csID));
156   for (set = 0; set < numCS; set++) {
157     IS              cellIS;
158     const PetscInt *cellID;
159     PetscInt        numCells, cell, closureSize, *closureA = NULL;
160 
161     PetscCall(DMGetStratumSize(dm, "Cell Sets", csID[set], &numCells));
162     PetscCall(DMGetStratumIS(dm, "Cell Sets", csID[set], &cellIS));
163     if (numCells > 0) {
164       /* dof layout ordered by increasing height in the DAG: cell, face, edge, vertex */
165       PetscInt  dofUP1Tri[]  = {2, 0, 0};
166       PetscInt  dofAP1Tri[]  = {1, 0, 0};
167       PetscInt  dofUP2Tri[]  = {2, 2, 0};
168       PetscInt  dofAP2Tri[]  = {1, 1, 0};
169       PetscInt  dofUP1Quad[] = {2, 0, 0};
170       PetscInt  dofAP1Quad[] = {1, 0, 0};
171       PetscInt  dofUP2Quad[] = {2, 2, 2};
172       PetscInt  dofAP2Quad[] = {1, 1, 1};
173       PetscInt  dofS2D[]     = {0, 0, 3};
174       PetscInt  dofUP1Tet[]  = {3, 0, 0, 0};
175       PetscInt  dofAP1Tet[]  = {1, 0, 0, 0};
176       PetscInt  dofUP2Tet[]  = {3, 3, 0, 0};
177       PetscInt  dofAP2Tet[]  = {1, 1, 0, 0};
178       PetscInt  dofUP1Hex[]  = {3, 0, 0, 0};
179       PetscInt  dofAP1Hex[]  = {1, 0, 0, 0};
180       PetscInt  dofUP2Hex[]  = {3, 3, 3, 3};
181       PetscInt  dofAP2Hex[]  = {1, 1, 1, 1};
182       PetscInt  dofS3D[]     = {0, 0, 0, 6};
183       PetscInt *dofU, *dofA, *dofS;
184 
185       switch (sdim) {
186       case 2: dofS = dofS2D; break;
187       case 3: dofS = dofS3D; break;
188       default: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "No layout for dimension %" PetscInt_FMT, sdim);
189       }
190 
191       /* Identify cell type based on closure size only. This works for Tri/Tet/Quad/Hex meshes
192          It will not be enough to identify more exotic elements like pyramid or prisms...  */
193       PetscCall(ISGetIndices(cellIS, &cellID));
194       PetscCall(DMPlexGetTransitiveClosure(dm, cellID[0], PETSC_TRUE, &closureSize, &closureA));
195       switch (closureSize) {
196       case 7: /* Tri */
197         if (order == 1) {
198           dofU = dofUP1Tri;
199           dofA = dofAP1Tri;
200         } else {
201           dofU = dofUP2Tri;
202           dofA = dofAP2Tri;
203         }
204         break;
205       case 9: /* Quad */
206         if (order == 1) {
207           dofU = dofUP1Quad;
208           dofA = dofAP1Quad;
209         } else {
210           dofU = dofUP2Quad;
211           dofA = dofAP2Quad;
212         }
213         break;
214       case 15: /* Tet */
215         if (order == 1) {
216           dofU = dofUP1Tet;
217           dofA = dofAP1Tet;
218         } else {
219           dofU = dofUP2Tet;
220           dofA = dofAP2Tet;
221         }
222         break;
223       case 27: /* Hex */
224         if (order == 1) {
225           dofU = dofUP1Hex;
226           dofA = dofAP1Hex;
227         } else {
228           dofU = dofUP2Hex;
229           dofA = dofAP2Hex;
230         }
231         break;
232       default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Unknown element with closure size %" PetscInt_FMT, closureSize);
233       }
234       PetscCall(DMPlexRestoreTransitiveClosure(dm, cellID[0], PETSC_TRUE, &closureSize, &closureA));
235 
236       for (cell = 0; cell < numCells; cell++) {
237         PetscInt *closure = NULL;
238 
239         PetscCall(DMPlexGetTransitiveClosure(dm, cellID[cell], PETSC_TRUE, &closureSize, &closure));
240         for (p = 0; p < closureSize; ++p) {
241           /* Find depth of p */
242           for (d = 0; d <= sdim; ++d) {
243             if ((closure[2 * p] >= pStartDepth[d]) && (closure[2 * p] < pEndDepth[d])) {
244               PetscCall(PetscSectionSetDof(section, closure[2 * p], dofU[d] + dofA[d] + dofS[d]));
245               PetscCall(PetscSectionSetFieldDof(section, closure[2 * p], fieldU, dofU[d]));
246               PetscCall(PetscSectionSetFieldDof(section, closure[2 * p], fieldA, dofA[d]));
247               PetscCall(PetscSectionSetFieldDof(section, closure[2 * p], fieldS, dofS[d]));
248             }
249           }
250         }
251         PetscCall(DMPlexRestoreTransitiveClosure(dm, cellID[cell], PETSC_TRUE, &closureSize, &closure));
252       }
253       PetscCall(ISRestoreIndices(cellIS, &cellID));
254       PetscCall(ISDestroy(&cellIS));
255     }
256   }
257   if (csIS) PetscCall(ISRestoreIndices(csIS, &csID));
258   PetscCall(ISDestroy(&csIS));
259   PetscCall(PetscSectionSetUp(section));
260   PetscCall(DMSetLocalSection(dm, section));
261   PetscCall(PetscObjectViewFromOptions((PetscObject)section, NULL, "-dm_section_view"));
262   PetscCall(PetscSectionDestroy(&section));
263 
264   {
265     PetscSF          migrationSF;
266     PetscInt         ovlp = 0;
267     PetscPartitioner part;
268 
269     PetscCall(DMSetUseNatural(dm, PETSC_TRUE));
270     PetscCall(DMPlexGetPartitioner(dm, &part));
271     PetscCall(PetscPartitionerSetFromOptions(part));
272     PetscCall(DMPlexDistribute(dm, ovlp, &migrationSF, &pdm));
273     if (!pdm) pdm = dm;
274     /* Set the migrationSF is mandatory since useNatural = PETSC_TRUE */
275     if (migrationSF) {
276       PetscCall(DMPlexSetMigrationSF(pdm, migrationSF));
277       PetscCall(PetscSFDestroy(&migrationSF));
278     }
279     PetscCall(DMViewFromOptions(pdm, NULL, "-dm_view"));
280   }
281 
282   /* Get DM and IS for each field of dm */
283   PetscCall(DMCreateSubDM(pdm, 1, &fieldU, &isU, &dmU));
284   PetscCall(DMCreateSubDM(pdm, 1, &fieldA, &isA, &dmA));
285   PetscCall(DMCreateSubDM(pdm, 1, &fieldS, &isS, &dmS));
286   PetscCall(DMCreateSubDM(pdm, 2, fieldUA, &isUA, &dmUA));
287 
288   PetscCall(PetscMalloc1(2, &dmList));
289   dmList[0] = dmU;
290   dmList[1] = dmA;
291 
292   PetscCall(DMCreateSuperDM(dmList, 2, NULL, &dmUA2));
293   PetscCall(PetscFree(dmList));
294 
295   PetscCall(DMGetGlobalVector(pdm, &X));
296   PetscCall(DMGetGlobalVector(dmU, &U));
297   PetscCall(DMGetGlobalVector(dmA, &A));
298   PetscCall(DMGetGlobalVector(dmS, &S));
299   PetscCall(DMGetGlobalVector(dmUA, &UA));
300   PetscCall(DMGetGlobalVector(dmUA2, &UA2));
301 
302   PetscCall(PetscObjectSetName((PetscObject)U, "U"));
303   PetscCall(PetscObjectSetName((PetscObject)A, "Alpha"));
304   PetscCall(PetscObjectSetName((PetscObject)S, "Sigma"));
305   PetscCall(PetscObjectSetName((PetscObject)UA, "UAlpha"));
306   PetscCall(PetscObjectSetName((PetscObject)UA2, "UAlpha2"));
307   PetscCall(VecSet(X, -111.));
308 
309   /* Setting u to [x,y,z]  and alpha to x^2+y^2+z^2 by writing in UAlpha then restricting to U and Alpha */
310   {
311     PetscSection sectionUA;
312     Vec          UALoc;
313     PetscSection coordSection;
314     Vec          coord;
315     PetscScalar *cval, *xyz;
316     PetscInt     clSize, i, j;
317 
318     PetscCall(DMGetLocalSection(dmUA, &sectionUA));
319     PetscCall(DMGetLocalVector(dmUA, &UALoc));
320     PetscCall(VecGetArray(UALoc, &cval));
321     PetscCall(DMGetCoordinateSection(dmUA, &coordSection));
322     PetscCall(DMGetCoordinatesLocal(dmUA, &coord));
323     PetscCall(DMPlexGetChart(dmUA, &pStart, &pEnd));
324 
325     for (p = pStart; p < pEnd; ++p) {
326       PetscInt dofUA, offUA;
327 
328       PetscCall(PetscSectionGetDof(sectionUA, p, &dofUA));
329       if (dofUA > 0) {
330         xyz = NULL;
331         PetscCall(PetscSectionGetOffset(sectionUA, p, &offUA));
332         PetscCall(DMPlexVecGetClosure(dmUA, coordSection, coord, p, &clSize, &xyz));
333         cval[offUA + sdim] = 0.;
334         for (i = 0; i < sdim; ++i) {
335           cval[offUA + i] = 0;
336           for (j = 0; j < clSize / sdim; ++j) cval[offUA + i] += xyz[j * sdim + i];
337           cval[offUA + i] = cval[offUA + i] * sdim / clSize;
338           cval[offUA + sdim] += PetscSqr(cval[offUA + i]);
339         }
340         PetscCall(DMPlexVecRestoreClosure(dmUA, coordSection, coord, p, &clSize, &xyz));
341       }
342     }
343     PetscCall(VecRestoreArray(UALoc, &cval));
344     PetscCall(DMLocalToGlobalBegin(dmUA, UALoc, INSERT_VALUES, UA));
345     PetscCall(DMLocalToGlobalEnd(dmUA, UALoc, INSERT_VALUES, UA));
346     PetscCall(DMRestoreLocalVector(dmUA, &UALoc));
347 
348     /* Update X */
349     PetscCall(VecISCopy(X, isUA, SCATTER_FORWARD, UA));
350     PetscCall(VecViewFromOptions(UA, NULL, "-ua_vec_view"));
351 
352     /* Restrict to U and Alpha */
353     PetscCall(VecISCopy(X, isU, SCATTER_REVERSE, U));
354     PetscCall(VecISCopy(X, isA, SCATTER_REVERSE, A));
355 
356     /* restrict to UA2 */
357     PetscCall(VecISCopy(X, isUA, SCATTER_REVERSE, UA2));
358     PetscCall(VecViewFromOptions(UA2, NULL, "-ua2_vec_view"));
359   }
360 
361   {
362     Vec          tmpVec;
363     PetscSection coordSection;
364     Vec          coord;
365     PetscReal    norm;
366     PetscReal    time = 1.234;
367 
368     /* Writing nodal variables to ExodusII file */
369     PetscCall(DMSetOutputSequenceNumber(dmU, 0, time));
370     PetscCall(DMSetOutputSequenceNumber(dmA, 0, time));
371 
372     PetscCall(VecView(U, viewer));
373     PetscCall(VecView(A, viewer));
374 
375     /* Saving U and Alpha in one shot.
376        For this, we need to cheat and change the Vec's name
377        Note that in the end we write variables one component at a time,
378        so that there is no real values in doing this
379     */
380 
381     PetscCall(DMSetOutputSequenceNumber(dmUA, 1, time));
382     PetscCall(DMGetGlobalVector(dmUA, &tmpVec));
383     PetscCall(VecCopy(UA, tmpVec));
384     PetscCall(PetscObjectSetName((PetscObject)tmpVec, "U"));
385     PetscCall(VecView(tmpVec, viewer));
386     /* Reading nodal variables in Exodus file */
387     PetscCall(VecSet(tmpVec, -1000.0));
388     PetscCall(VecLoad(tmpVec, viewer));
389     PetscCall(VecAXPY(UA, -1.0, tmpVec));
390     PetscCall(VecNorm(UA, NORM_INFINITY, &norm));
391     PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "UAlpha ||Vin - Vout|| = %g", (double)norm);
392     PetscCall(DMRestoreGlobalVector(dmUA, &tmpVec));
393 
394     /* same thing with the UA2 Vec obtained from the superDM */
395     PetscCall(DMGetGlobalVector(dmUA2, &tmpVec));
396     PetscCall(VecCopy(UA2, tmpVec));
397     PetscCall(PetscObjectSetName((PetscObject)tmpVec, "U"));
398     PetscCall(DMSetOutputSequenceNumber(dmUA2, 2, time));
399     PetscCall(VecView(tmpVec, viewer));
400     /* Reading nodal variables in Exodus file */
401     PetscCall(VecSet(tmpVec, -1000.0));
402     PetscCall(VecLoad(tmpVec, viewer));
403     PetscCall(VecAXPY(UA2, -1.0, tmpVec));
404     PetscCall(VecNorm(UA2, NORM_INFINITY, &norm));
405     PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "UAlpha2 ||Vin - Vout|| = %g", (double)norm);
406     PetscCall(DMRestoreGlobalVector(dmUA2, &tmpVec));
407 
408     /* Building and saving Sigma
409        We set sigma_0 = rank (to see partitioning)
410               sigma_1 = cell set ID
411               sigma_2 = x_coordinate of the cell center of mass
412     */
413     PetscCall(DMGetCoordinateSection(dmS, &coordSection));
414     PetscCall(DMGetCoordinatesLocal(dmS, &coord));
415     PetscCall(DMGetLabelIdIS(dmS, "Cell Sets", &csIS));
416     PetscCall(DMGetLabelSize(dmS, "Cell Sets", &numCS));
417     PetscCall(ISGetIndices(csIS, &csID));
418     for (set = 0; set < numCS; ++set) {
419       /* We know that all cells in a cell set have the same type, so we can dimension cval and xyz once for each cell set */
420       IS              cellIS;
421       const PetscInt *cellID;
422       PetscInt        numCells, cell;
423       PetscScalar    *cval = NULL, *xyz = NULL;
424       PetscInt        clSize, cdimCoord, c;
425 
426       PetscCall(DMGetStratumIS(dmS, "Cell Sets", csID[set], &cellIS));
427       PetscCall(ISGetIndices(cellIS, &cellID));
428       PetscCall(ISGetSize(cellIS, &numCells));
429       for (cell = 0; cell < numCells; cell++) {
430         PetscCall(DMPlexVecGetClosure(dmS, NULL, S, cellID[cell], &clSize, &cval));
431         PetscCall(DMPlexVecGetClosure(dmS, coordSection, coord, cellID[cell], &cdimCoord, &xyz));
432         cval[0] = rank;
433         cval[1] = csID[set];
434         cval[2] = 0.;
435         for (c = 0; c < cdimCoord / sdim; c++) cval[2] += xyz[c * sdim];
436         cval[2] = cval[2] * sdim / cdimCoord;
437         PetscCall(DMPlexVecSetClosure(dmS, NULL, S, cellID[cell], cval, INSERT_ALL_VALUES));
438       }
439       PetscCall(DMPlexVecRestoreClosure(dmS, NULL, S, cellID[0], &clSize, &cval));
440       PetscCall(DMPlexVecRestoreClosure(dmS, coordSection, coord, cellID[0], NULL, &xyz));
441       PetscCall(ISRestoreIndices(cellIS, &cellID));
442       PetscCall(ISDestroy(&cellIS));
443     }
444     PetscCall(ISRestoreIndices(csIS, &csID));
445     PetscCall(ISDestroy(&csIS));
446     PetscCall(VecViewFromOptions(S, NULL, "-s_vec_view"));
447 
448     /* Writing zonal variables in Exodus file */
449     PetscCall(DMSetOutputSequenceNumber(dmS, 0, time));
450     PetscCall(VecView(S, viewer));
451 
452     /* Reading zonal variables in Exodus file */
453     PetscCall(DMGetGlobalVector(dmS, &tmpVec));
454     PetscCall(VecSet(tmpVec, -1000.0));
455     PetscCall(PetscObjectSetName((PetscObject)tmpVec, "Sigma"));
456     PetscCall(VecLoad(tmpVec, viewer));
457     PetscCall(VecAXPY(S, -1.0, tmpVec));
458     PetscCall(VecNorm(S, NORM_INFINITY, &norm));
459     PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON, PetscObjectComm((PetscObject)dm), PETSC_ERR_PLIB, "Sigma ||Vin - Vout|| = %g", (double)norm);
460     PetscCall(DMRestoreGlobalVector(dmS, &tmpVec));
461   }
462   PetscCall(PetscViewerDestroy(&viewer));
463 
464   PetscCall(DMRestoreGlobalVector(dmUA2, &UA2));
465   PetscCall(DMRestoreGlobalVector(dmUA, &UA));
466   PetscCall(DMRestoreGlobalVector(dmS, &S));
467   PetscCall(DMRestoreGlobalVector(dmA, &A));
468   PetscCall(DMRestoreGlobalVector(dmU, &U));
469   PetscCall(DMRestoreGlobalVector(pdm, &X));
470   PetscCall(DMDestroy(&dmU));
471   PetscCall(ISDestroy(&isU));
472   PetscCall(DMDestroy(&dmA));
473   PetscCall(ISDestroy(&isA));
474   PetscCall(DMDestroy(&dmS));
475   PetscCall(ISDestroy(&isS));
476   PetscCall(DMDestroy(&dmUA));
477   PetscCall(ISDestroy(&isUA));
478   PetscCall(DMDestroy(&dmUA2));
479   PetscCall(DMDestroy(&pdm));
480   if (size > 1) PetscCall(DMDestroy(&dm));
481   PetscCall(PetscFree2(pStartDepth, pEndDepth));
482   PetscCall(PetscFinalize());
483   return 0;
484 }
485 
486 /*TEST
487 
488   build:
489     requires: exodusii pnetcdf !complex
490   # 2D seq
491   test:
492     suffix: 0
493     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 1
494     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
495   test:
496     suffix: 1
497     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 1
498   test:
499     suffix: 2
500     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 1
501     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
502   test:
503     suffix: 3
504     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 2
505   test:
506     suffix: 4
507     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 2
508   test:
509     suffix: 5
510     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 2
511 
512   # 2D par
513   test:
514     suffix: 6
515     nsize: 2
516     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 1
517     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
518   test:
519     suffix: 7
520     nsize: 2
521     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 1
522   test:
523     suffix: 8
524     nsize: 2
525     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 1
526     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: invalid dimension ID or name
527   test:
528     suffix: 9
529     nsize: 2
530     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -petscpartitioner_type simple -order 2
531   test:
532     suffix: 10
533     nsize: 2
534     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -petscpartitioner_type simple -order 2
  test:
    # Something is now broken with parallel read/write for whatever shape H is
537     TODO: broken
538     suffix: 11
539     nsize: 2
540     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -petscpartitioner_type simple -order 2
541 
542   #3d seq
543   test:
544     suffix: 12
545     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 1
546   test:
547     suffix: 13
548     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 1
549     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
550   test:
551     suffix: 14
552     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 2
553   test:
554     suffix: 15
555     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 2
556     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
557   #3d par
558   test:
559     suffix: 16
560     nsize: 2
561     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 1
562   test:
563     suffix: 17
564     nsize: 2
565     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 1
566     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
567   test:
568     suffix: 18
569     nsize: 2
570     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -petscpartitioner_type simple -order 2
571   test:
572     suffix: 19
573     nsize: 2
574     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -petscpartitioner_type simple -order 2
575     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
576 
577 TEST*/
578