xref: /petsc/src/dm/impls/plex/tests/ex26.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
1 static char help[] = "Test FEM layout with DM and ExodusII storage\n\n";
2 
3 /*
4   In order to see the vectors which are being tested, use
5 
6      -ua_vec_view -s_vec_view
7 */
8 
9 #include <petsc.h>
10 #include <exodusII.h>
11 
12 #include <petsc/private/dmpleximpl.h>
13 
14 int main(int argc, char **argv) {
15   DM                dm, dmU, dmA, dmS, dmUA, dmUA2, *dmList;
16   Vec               X, U, A, S, UA, UA2;
17   IS                isU, isA, isS, isUA;
18   PetscSection      section;
19   const PetscInt    fieldU = 0;
20   const PetscInt    fieldA = 2;
21   const PetscInt    fieldS = 1;
22   const PetscInt    fieldUA[2] = {0, 2};
23   char              ifilename[PETSC_MAX_PATH_LEN], ofilename[PETSC_MAX_PATH_LEN];
24   int               exoid = -1;
25   IS                csIS;
26   const PetscInt   *csID;
27   PetscInt         *pStartDepth, *pEndDepth;
28   PetscInt          order = 1;
29   PetscInt          sdim, d, pStart, pEnd, p, numCS, set;
30   PetscMPIInt       rank, size;
31   PetscViewer       viewer;
32 
33   PetscCall(PetscInitialize(&argc, &argv,NULL, help));
34   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
35   PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
36   PetscOptionsBegin(PETSC_COMM_WORLD, NULL, "FEM Layout Options", "ex26");
37   PetscCall(PetscOptionsString("-i", "Filename to read", "ex26", ifilename, ifilename, sizeof(ifilename), NULL));
38   PetscCall(PetscOptionsString("-o", "Filename to write", "ex26", ofilename, ofilename, sizeof(ofilename), NULL));
39   PetscCall(PetscOptionsBoundedInt("-order", "FEM polynomial order", "ex26", order, &order, NULL,1));
40   PetscOptionsEnd();
41   PetscCheck((order >= 1) && (order <= 2),PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "Unsupported polynomial order %" PetscInt_FMT " not in [1, 2]", order);
42 
43   /* Read the mesh from a file in any supported format */
44   PetscCall(DMPlexCreateFromFile(PETSC_COMM_WORLD, ifilename, NULL, PETSC_TRUE, &dm));
45   PetscCall(DMPlexDistributeSetDefault(dm, PETSC_FALSE));
46   PetscCall(DMSetFromOptions(dm));
47   PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
48   PetscCall(DMGetDimension(dm, &sdim));
49 
50   /* Create the exodus result file */
51   {
52     PetscInt      numstep = 3, step;
53     char         *nodalVarName[4];
54     char         *zonalVarName[6];
55     int          *truthtable;
56     PetscInt      numNodalVar, numZonalVar, i;
57 
58     /* enable exodus debugging information */
59     ex_opts(EX_VERBOSE|EX_DEBUG);
60     /* Create the exodus file */
61     PetscCall(PetscViewerExodusIIOpen(PETSC_COMM_WORLD,ofilename,FILE_MODE_WRITE,&viewer));
62     /* The long way would be */
63     /*
64       PetscCall(PetscViewerCreate(PETSC_COMM_WORLD,&viewer));
65       PetscCall(PetscViewerSetType(viewer,PETSCVIEWEREXODUSII));
66       PetscCall(PetscViewerFileSetMode(viewer,FILE_MODE_APPEND));
67       PetscCall(PetscViewerFileSetName(viewer,ofilename));
68     */
69     /* set the mesh order */
70     PetscCall(PetscViewerExodusIISetOrder(viewer,order));
71     PetscCall(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD));
72     /*
73       Notice how the exodus file is actually NOT open at this point (exoid is -1)
74       Since we are overwritting the file (mode is FILE_MODE_WRITE), we are going to have to
75       write the geometry (the DM), which can only be done on a brand new file.
76     */
77 
78     /* Save the geometry to the file, erasing all previous content */
79     PetscCall(DMView(dm,viewer));
80     PetscCall(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD));
81     /*
82       Note how the exodus file is now open
83     */
84 
85     /* "Format" the exodus result file, i.e. allocate space for nodal and zonal variables */
86     switch (sdim) {
87     case 2:
88       numNodalVar = 3;
89       nodalVarName[0] = (char *) "U_x";
90       nodalVarName[1] = (char *) "U_y";
91       nodalVarName[2] = (char *) "Alpha";
92       numZonalVar = 3;
93       zonalVarName[0] = (char *) "Sigma_11";
94       zonalVarName[1] = (char *) "Sigma_22";
95       zonalVarName[2] = (char *) "Sigma_12";
96       break;
97     case 3:
98       numNodalVar = 4;
99       nodalVarName[0] = (char *) "U_x";
100       nodalVarName[1] = (char *) "U_y";
101       nodalVarName[2] = (char *) "U_z";
102       nodalVarName[3] = (char *) "Alpha";
103       numZonalVar = 6;
104       zonalVarName[0] = (char *) "Sigma_11";
105       zonalVarName[1] = (char *) "Sigma_22";
106       zonalVarName[2] = (char *) "Sigma_33";
107       zonalVarName[3] = (char *) "Sigma_23";
108       zonalVarName[4] = (char *) "Sigma_13";
109       zonalVarName[5] = (char *) "Sigma_12";
110       break;
111     default: SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No layout for dimension %" PetscInt_FMT, sdim);
112     }
113     PetscCall(PetscViewerExodusIIGetId(viewer,&exoid));
114     PetscStackCallStandard(ex_put_variable_param,exoid, EX_ELEM_BLOCK, numZonalVar);
115     PetscStackCallStandard(ex_put_variable_names,exoid, EX_ELEM_BLOCK, numZonalVar, zonalVarName);
116     PetscStackCallStandard(ex_put_variable_param,exoid, EX_NODAL, numNodalVar);
117     PetscStackCallStandard(ex_put_variable_names,exoid, EX_NODAL, numNodalVar, nodalVarName);
118     numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
119 
120     /*
121       An exodusII truth table specifies which fields are saved at which time step
122       It speeds up I/O but reserving space for fieldsin the file ahead of time.
123     */
124     PetscCall(PetscMalloc1(numZonalVar * numCS, &truthtable));
125     for (i = 0; i < numZonalVar * numCS; ++i) truthtable[i] = 1;
126     PetscStackCallStandard(ex_put_truth_table,exoid, EX_ELEM_BLOCK, numCS, numZonalVar, truthtable);
127     PetscCall(PetscFree(truthtable));
128 
129     /* Writing time step information in the file. Note that this is currently broken in the exodus library for netcdf4 (HDF5-based) files */
130     for (step = 0; step < numstep; ++step) {
131       PetscReal time = step;
132       PetscStackCallStandard(ex_put_time,exoid, step+1, &time);
133     }
134   }
135 
136   /* Create the main section containing all fields */
137   PetscCall(PetscSectionCreate(PetscObjectComm((PetscObject) dm), &section));
138   PetscCall(PetscSectionSetNumFields(section, 3));
139   PetscCall(PetscSectionSetFieldName(section, fieldU, "U"));
140   PetscCall(PetscSectionSetFieldName(section, fieldA, "Alpha"));
141   PetscCall(PetscSectionSetFieldName(section, fieldS, "Sigma"));
142   PetscCall(DMPlexGetChart(dm, &pStart, &pEnd));
143   PetscCall(PetscSectionSetChart(section, pStart, pEnd));
144   PetscCall(PetscMalloc2(sdim+1, &pStartDepth, sdim+1, &pEndDepth));
145   for (d = 0; d <= sdim; ++d) PetscCall(DMPlexGetDepthStratum(dm, d, &pStartDepth[d], &pEndDepth[d]));
146   /* Vector field U, Scalar field Alpha, Tensor field Sigma */
147   PetscCall(PetscSectionSetFieldComponents(section, fieldU, sdim));
148   PetscCall(PetscSectionSetFieldComponents(section, fieldA, 1));
149   PetscCall(PetscSectionSetFieldComponents(section, fieldS, sdim*(sdim+1)/2));
150 
151   /* Going through cell sets then cells, and setting up storage for the sections */
152   PetscCall(DMGetLabelSize(dm, "Cell Sets", &numCS));
153   PetscCall(DMGetLabelIdIS(dm, "Cell Sets", &csIS));
154   if (csIS) PetscCall(ISGetIndices(csIS, &csID));
155   for (set = 0; set < numCS; set++) {
156     IS                cellIS;
157     const PetscInt   *cellID;
158     PetscInt          numCells, cell, closureSize, *closureA = NULL;
159 
160     PetscCall(DMGetStratumSize(dm, "Cell Sets", csID[set], &numCells));
161     PetscCall(DMGetStratumIS(dm, "Cell Sets", csID[set], &cellIS));
162     if (numCells > 0) {
163       /* dof layout ordered by increasing height in the DAG: cell, face, edge, vertex */
164       PetscInt          dofUP1Tri[]  = {2, 0, 0};
165       PetscInt          dofAP1Tri[]  = {1, 0, 0};
166       PetscInt          dofUP2Tri[]  = {2, 2, 0};
167       PetscInt          dofAP2Tri[]  = {1, 1, 0};
168       PetscInt          dofUP1Quad[] = {2, 0, 0};
169       PetscInt          dofAP1Quad[] = {1, 0, 0};
170       PetscInt          dofUP2Quad[] = {2, 2, 2};
171       PetscInt          dofAP2Quad[] = {1, 1, 1};
172       PetscInt          dofS2D[]     = {0, 0, 3};
173       PetscInt          dofUP1Tet[]  = {3, 0, 0, 0};
174       PetscInt          dofAP1Tet[]  = {1, 0, 0, 0};
175       PetscInt          dofUP2Tet[]  = {3, 3, 0, 0};
176       PetscInt          dofAP2Tet[]  = {1, 1, 0, 0};
177       PetscInt          dofUP1Hex[]  = {3, 0, 0, 0};
178       PetscInt          dofAP1Hex[]  = {1, 0, 0, 0};
179       PetscInt          dofUP2Hex[]  = {3, 3, 3, 3};
180       PetscInt          dofAP2Hex[]  = {1, 1, 1, 1};
181       PetscInt          dofS3D[]     = {0, 0, 0, 6};
182       PetscInt         *dofU, *dofA, *dofS;
183 
184       switch (sdim) {
185       case 2: dofS = dofS2D;break;
186       case 3: dofS = dofS3D;break;
187       default: SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "No layout for dimension %" PetscInt_FMT, sdim);
188       }
189 
190       /* Identify cell type based on closure size only. This works for Tri/Tet/Quad/Hex meshes
191          It will not be enough to identify more exotic elements like pyramid or prisms...  */
192       PetscCall(ISGetIndices(cellIS, &cellID));
193       PetscCall(DMPlexGetTransitiveClosure(dm, cellID[0], PETSC_TRUE, &closureSize, &closureA));
194       switch (closureSize) {
195         case 7: /* Tri */
196         if (order == 1) {
197           dofU = dofUP1Tri;
198           dofA = dofAP1Tri;
199         } else {
200           dofU = dofUP2Tri;
201           dofA = dofAP2Tri;
202         }
203         break;
204         case 9: /* Quad */
205         if (order == 1) {
206           dofU = dofUP1Quad;
207           dofA = dofAP1Quad;
208         } else {
209           dofU = dofUP2Quad;
210           dofA = dofAP2Quad;
211         }
212         break;
213         case 15: /* Tet */
214         if (order == 1) {
215           dofU = dofUP1Tet;
216           dofA = dofAP1Tet;
217         } else {
218           dofU = dofUP2Tet;
219           dofA = dofAP2Tet;
220         }
221         break;
222         case 27: /* Hex */
223         if (order == 1) {
224           dofU = dofUP1Hex;
225           dofA = dofAP1Hex;
226         } else {
227           dofU = dofUP2Hex;
228           dofA = dofAP2Hex;
229         }
230         break;
231         default: SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Unknown element with closure size %" PetscInt_FMT, closureSize);
232       }
233       PetscCall(DMPlexRestoreTransitiveClosure(dm, cellID[0], PETSC_TRUE, &closureSize, &closureA));
234 
235       for (cell = 0; cell < numCells; cell++) {
236         PetscInt *closure = NULL;
237 
238         PetscCall(DMPlexGetTransitiveClosure(dm, cellID[cell], PETSC_TRUE, &closureSize, &closure));
239         for (p = 0; p < closureSize; ++p) {
240           /* Find depth of p */
241           for (d = 0; d <= sdim; ++d) {
242             if ((closure[2*p] >= pStartDepth[d]) && (closure[2*p] < pEndDepth[d])) {
243               PetscCall(PetscSectionSetDof(section, closure[2*p], dofU[d]+dofA[d]+dofS[d]));
244               PetscCall(PetscSectionSetFieldDof(section, closure[2*p], fieldU, dofU[d]));
245               PetscCall(PetscSectionSetFieldDof(section, closure[2*p], fieldA, dofA[d]));
246               PetscCall(PetscSectionSetFieldDof(section, closure[2*p], fieldS, dofS[d]));
247             }
248           }
249         }
250         PetscCall(DMPlexRestoreTransitiveClosure(dm, cellID[cell], PETSC_TRUE, &closureSize, &closure));
251       }
252       PetscCall(ISRestoreIndices(cellIS, &cellID));
253       PetscCall(ISDestroy(&cellIS));
254     }
255   }
256   if (csIS) PetscCall(ISRestoreIndices(csIS, &csID));
257   PetscCall(ISDestroy(&csIS));
258   PetscCall(PetscSectionSetUp(section));
259   PetscCall(DMSetLocalSection(dm, section));
260   PetscCall(PetscObjectViewFromOptions((PetscObject) section, NULL, "-dm_section_view"));
261   PetscCall(PetscSectionDestroy(&section));
262 
263   {
264     DM               pdm;
265     PetscSF          migrationSF;
266     PetscInt         ovlp = 0;
267     PetscPartitioner part;
268 
269     PetscCall(DMSetUseNatural(dm,PETSC_TRUE));
270     PetscCall(DMPlexGetPartitioner(dm,&part));
271     PetscCall(PetscPartitionerSetFromOptions(part));
272     PetscCall(DMPlexDistribute(dm,ovlp,&migrationSF,&pdm));
273     if (pdm) {
274       PetscCall(DMPlexSetMigrationSF(pdm,migrationSF));
275       PetscCall(PetscSFDestroy(&migrationSF));
276       PetscCall(DMDestroy(&dm));
277       dm = pdm;
278       PetscCall(DMViewFromOptions(dm,NULL,"-dm_view"));
279     }
280   }
281 
282   /* Get DM and IS for each field of dm */
283   PetscCall(DMCreateSubDM(dm, 1, &fieldU, &isU,  &dmU));
284   PetscCall(DMCreateSubDM(dm, 1, &fieldA, &isA,  &dmA));
285   PetscCall(DMCreateSubDM(dm, 1, &fieldS, &isS,  &dmS));
286   PetscCall(DMCreateSubDM(dm, 2, fieldUA, &isUA, &dmUA));
287 
288   PetscCall(PetscMalloc1(2,&dmList));
289   dmList[0] = dmU;
290   dmList[1] = dmA;
291   /* We temporarily disable dmU->useNatural to test that we can reconstruct the
292      NaturaltoGlobal SF from any of the dm in dms
293   */
294   dmU->useNatural = PETSC_FALSE;
295   PetscCall(DMCreateSuperDM(dmList,2,NULL,&dmUA2));
296   dmU->useNatural = PETSC_TRUE;
297   PetscCall(PetscFree(dmList));
298 
299   PetscCall(DMGetGlobalVector(dm,   &X));
300   PetscCall(DMGetGlobalVector(dmU,  &U));
301   PetscCall(DMGetGlobalVector(dmA,  &A));
302   PetscCall(DMGetGlobalVector(dmS,  &S));
303   PetscCall(DMGetGlobalVector(dmUA, &UA));
304   PetscCall(DMGetGlobalVector(dmUA2, &UA2));
305 
306   PetscCall(PetscObjectSetName((PetscObject) U,  "U"));
307   PetscCall(PetscObjectSetName((PetscObject) A,  "Alpha"));
308   PetscCall(PetscObjectSetName((PetscObject) S,  "Sigma"));
309   PetscCall(PetscObjectSetName((PetscObject) UA, "UAlpha"));
310   PetscCall(PetscObjectSetName((PetscObject) UA2, "UAlpha2"));
311   PetscCall(VecSet(X, -111.));
312 
313   /* Setting u to [x,y,z]  and alpha to x^2+y^2+z^2 by writing in UAlpha then restricting to U and Alpha */
314   {
315     PetscSection sectionUA;
316     Vec          UALoc;
317     PetscSection coordSection;
318     Vec          coord;
319     PetscScalar *cval, *xyz;
320     PetscInt     clSize, i, j;
321 
322     PetscCall(DMGetLocalSection(dmUA, &sectionUA));
323     PetscCall(DMGetLocalVector(dmUA, &UALoc));
324     PetscCall(VecGetArray(UALoc, &cval));
325     PetscCall(DMGetCoordinateSection(dmUA, &coordSection));
326     PetscCall(DMGetCoordinatesLocal(dmUA, &coord));
327     PetscCall(DMPlexGetChart(dmUA, &pStart, &pEnd));
328 
329     for (p = pStart; p < pEnd; ++p) {
330       PetscInt dofUA, offUA;
331 
332       PetscCall(PetscSectionGetDof(sectionUA, p, &dofUA));
333       if (dofUA > 0) {
334         xyz=NULL;
335         PetscCall(PetscSectionGetOffset(sectionUA, p, &offUA));
336         PetscCall(DMPlexVecGetClosure(dmUA, coordSection, coord, p, &clSize, &xyz));
337         cval[offUA+sdim] = 0.;
338         for (i = 0; i < sdim; ++i) {
339           cval[offUA+i] = 0;
340           for (j = 0; j < clSize/sdim; ++j) {
341             cval[offUA+i] += xyz[j*sdim+i];
342           }
343           cval[offUA+i] = cval[offUA+i] * sdim / clSize;
344           cval[offUA+sdim] += PetscSqr(cval[offUA+i]);
345         }
346         PetscCall(DMPlexVecRestoreClosure(dmUA, coordSection, coord, p, &clSize, &xyz));
347       }
348     }
349     PetscCall(VecRestoreArray(UALoc, &cval));
350     PetscCall(DMLocalToGlobalBegin(dmUA, UALoc, INSERT_VALUES, UA));
351     PetscCall(DMLocalToGlobalEnd(dmUA, UALoc, INSERT_VALUES, UA));
352     PetscCall(DMRestoreLocalVector(dmUA, &UALoc));
353 
354     /* Update X */
355     PetscCall(VecISCopy(X, isUA, SCATTER_FORWARD, UA));
356     PetscCall(VecViewFromOptions(UA, NULL, "-ua_vec_view"));
357 
358     /* Restrict to U and Alpha */
359     PetscCall(VecISCopy(X, isU, SCATTER_REVERSE, U));
360     PetscCall(VecISCopy(X, isA, SCATTER_REVERSE, A));
361 
362     /* restrict to UA2 */
363     PetscCall(VecISCopy(X, isUA, SCATTER_REVERSE, UA2));
364     PetscCall(VecViewFromOptions(UA2, NULL, "-ua2_vec_view"));
365   }
366 
367   {
368     Vec          tmpVec;
369     PetscSection coordSection;
370     Vec          coord;
371     PetscReal    norm;
372     PetscReal    time = 1.234;
373 
374     /* Writing nodal variables to ExodusII file */
375     PetscCall(DMSetOutputSequenceNumber(dmU,0,time));
376     PetscCall(DMSetOutputSequenceNumber(dmA,0,time));
377 
378     PetscCall(VecView(U, viewer));
379     PetscCall(VecView(A, viewer));
380 
381     /* Saving U and Alpha in one shot.
382        For this, we need to cheat and change the Vec's name
383        Note that in the end we write variables one component at a time,
384        so that there is no real values in doing this
385     */
386 
387     PetscCall(DMSetOutputSequenceNumber(dmUA,1,time));
388     PetscCall(DMGetGlobalVector(dmUA, &tmpVec));
389     PetscCall(VecCopy(UA, tmpVec));
390     PetscCall(PetscObjectSetName((PetscObject) tmpVec, "U"));
391     PetscCall(VecView(tmpVec, viewer));
392     /* Reading nodal variables in Exodus file */
393     PetscCall(VecSet(tmpVec, -1000.0));
394     PetscCall(VecLoad(tmpVec, viewer));
395     PetscCall(VecAXPY(UA, -1.0, tmpVec));
396     PetscCall(VecNorm(UA, NORM_INFINITY, &norm));
397     PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON,PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "UAlpha ||Vin - Vout|| = %g", (double) norm);
398     PetscCall(DMRestoreGlobalVector(dmUA, &tmpVec));
399 
400     /* same thing with the UA2 Vec obtained from the superDM */
401     PetscCall(DMGetGlobalVector(dmUA2, &tmpVec));
402     PetscCall(VecCopy(UA2, tmpVec));
403     PetscCall(PetscObjectSetName((PetscObject) tmpVec, "U"));
404     PetscCall(DMSetOutputSequenceNumber(dmUA2,2,time));
405     PetscCall(VecView(tmpVec, viewer));
406     /* Reading nodal variables in Exodus file */
407     PetscCall(VecSet(tmpVec, -1000.0));
408     PetscCall(VecLoad(tmpVec,viewer));
409     PetscCall(VecAXPY(UA2, -1.0, tmpVec));
410     PetscCall(VecNorm(UA2, NORM_INFINITY, &norm));
411     PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON,PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "UAlpha2 ||Vin - Vout|| = %g", (double) norm);
412     PetscCall(DMRestoreGlobalVector(dmUA2, &tmpVec));
413 
414     /* Building and saving Sigma
415        We set sigma_0 = rank (to see partitioning)
416               sigma_1 = cell set ID
417               sigma_2 = x_coordinate of the cell center of mass
418     */
419     PetscCall(DMGetCoordinateSection(dmS, &coordSection));
420     PetscCall(DMGetCoordinatesLocal(dmS, &coord));
421     PetscCall(DMGetLabelIdIS(dmS, "Cell Sets", &csIS));
422     PetscCall(DMGetLabelSize(dmS, "Cell Sets", &numCS));
423     PetscCall(ISGetIndices(csIS, &csID));
424     for (set = 0; set < numCS; ++set) {
425       /* We know that all cells in a cell set have the same type, so we can dimension cval and xyz once for each cell set */
426       IS              cellIS;
427       const PetscInt *cellID;
428       PetscInt        numCells, cell;
429       PetscScalar    *cval = NULL, *xyz  = NULL;
430       PetscInt        clSize, cdimCoord, c;
431 
432       PetscCall(DMGetStratumIS(dmS, "Cell Sets", csID[set], &cellIS));
433       PetscCall(ISGetIndices(cellIS, &cellID));
434       PetscCall(ISGetSize(cellIS, &numCells));
435       for (cell = 0; cell < numCells; cell++) {
436         PetscCall(DMPlexVecGetClosure(dmS, NULL, S, cellID[cell], &clSize, &cval));
437         PetscCall(DMPlexVecGetClosure(dmS, coordSection, coord, cellID[cell], &cdimCoord, &xyz));
438         cval[0] = rank;
439         cval[1] = csID[set];
440         cval[2] = 0.;
441         for (c = 0; c < cdimCoord/sdim; c++) cval[2] += xyz[c*sdim];
442         cval[2] = cval[2] * sdim / cdimCoord;
443         PetscCall(DMPlexVecSetClosure(dmS, NULL, S, cellID[cell], cval, INSERT_ALL_VALUES));
444       }
445       PetscCall(DMPlexVecRestoreClosure(dmS, NULL, S, cellID[0], &clSize, &cval));
446       PetscCall(DMPlexVecRestoreClosure(dmS, coordSection, coord, cellID[0], NULL, &xyz));
447       PetscCall(ISRestoreIndices(cellIS, &cellID));
448       PetscCall(ISDestroy(&cellIS));
449     }
450     PetscCall(ISRestoreIndices(csIS, &csID));
451     PetscCall(ISDestroy(&csIS));
452     PetscCall(VecViewFromOptions(S, NULL, "-s_vec_view"));
453 
454     /* Writing zonal variables in Exodus file */
455     PetscCall(DMSetOutputSequenceNumber(dmS,0,time));
456     PetscCall(VecView(S,viewer));
457 
458     /* Reading zonal variables in Exodus file */
459     PetscCall(DMGetGlobalVector(dmS, &tmpVec));
460     PetscCall(VecSet(tmpVec, -1000.0));
461     PetscCall(PetscObjectSetName((PetscObject) tmpVec, "Sigma"));
462     PetscCall(VecLoad(tmpVec,viewer));
463     PetscCall(VecAXPY(S, -1.0, tmpVec));
464     PetscCall(VecNorm(S, NORM_INFINITY, &norm));
465     PetscCheck(norm <= PETSC_SQRT_MACHINE_EPSILON,PetscObjectComm((PetscObject) dm), PETSC_ERR_PLIB, "Sigma ||Vin - Vout|| = %g", (double) norm);
466     PetscCall(DMRestoreGlobalVector(dmS, &tmpVec));
467   }
468   PetscCall(PetscViewerDestroy(&viewer));
469 
470   PetscCall(DMRestoreGlobalVector(dmUA2, &UA2));
471   PetscCall(DMRestoreGlobalVector(dmUA, &UA));
472   PetscCall(DMRestoreGlobalVector(dmS,  &S));
473   PetscCall(DMRestoreGlobalVector(dmA,  &A));
474   PetscCall(DMRestoreGlobalVector(dmU,  &U));
475   PetscCall(DMRestoreGlobalVector(dm,   &X));
476   PetscCall(DMDestroy(&dmU)); PetscCall(ISDestroy(&isU));
477   PetscCall(DMDestroy(&dmA)); PetscCall(ISDestroy(&isA));
478   PetscCall(DMDestroy(&dmS)); PetscCall(ISDestroy(&isS));
479   PetscCall(DMDestroy(&dmUA));PetscCall(ISDestroy(&isUA));
480   PetscCall(DMDestroy(&dmUA2));
481   PetscCall(DMDestroy(&dm));
482   PetscCall(PetscFree2(pStartDepth, pEndDepth));
483   PetscCall(PetscFinalize());
484   return 0;
485 }
486 
487 /*TEST
488 
489   build:
490     requires: exodusii pnetcdf !complex
491   # 2D seq
492   test:
493     suffix: 0
494     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
495     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
496   test:
497     suffix: 1
498     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
499   test:
500     suffix: 2
501     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
502     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
503   test:
504     suffix: 3
505     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
506   test:
507     suffix: 4
508     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
509   test:
510     suffix: 5
511     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
512 
513   # 2D par
514   test:
515     suffix: 6
516     nsize: 2
517     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
518     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
519   test:
520     suffix: 7
521     nsize: 2
522     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
523   test:
524     suffix: 8
525     nsize: 2
526     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
527     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: invalid dimension ID or name
528   test:
529     suffix: 9
530     nsize: 2
531     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
532   test:
533     suffix: 10
534     nsize: 2
535     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
536   test:
537   # Something is now broken with parallel read/write for whatever shape H is
538     TODO: broken
539     suffix: 11
540     nsize: 2
541     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
542 
543   #3d seq
544   test:
545     suffix: 12
546     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
547   test:
548     suffix: 13
549     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
550     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
551   test:
552     suffix: 14
553     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
554   test:
555     suffix: 15
556     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
557     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
558   #3d par
559   test:
560     suffix: 16
561     nsize: 2
562     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
563   test:
564     suffix: 17
565     nsize: 2
566     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
567     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
568   test:
569     suffix: 18
570     nsize: 2
571     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
572   test:
573     suffix: 19
574     nsize: 2
575     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
576     #TODO: bug in call to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
577 
578 TEST*/
579