#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named.  This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da, PetscBool *fieldsnamed)
{
  PetscInt f, bs;

  PetscFunctionBegin;
  *fieldsnamed = PETSC_FALSE;
  PetscCall(DMDAGetDof(da, &bs));
  for (f = 0; f < bs; ++f) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, f, &fieldname));
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, cdim, bs, maxnnodes, maxbs, i, j, k, r;
  PetscInt64               boffset;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetCoordinates(da, &Coords));
  if (Coords) {
    PetscInt csize;
    PetscCall(VecGetSize(Coords, &csize));
    PetscCheck(csize % (mx * my * mz) == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
    cdim = csize / (mx * my * mz);
    PetscCheck(cdim >= dim && cdim <= 3, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\" header_type=\"UInt64\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, rank == 0 ? grloc[0] : NULL, 6, MPIU_INT, 0, comm));
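  /* After the gather, rank 0 alone holds every rank's local corner and extent
     (xs, xm, ys, ym, zs, zm) in grloc, so it can emit one <Piece> per rank and
     precompute the appended-data offsets below. */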

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Points>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
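    /* Each appended data block is preceded by a byte count whose width matches
       header_type="UInt64" above, so every offset advances by the payload size
       plus one PetscInt64. */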
    boffset += 3 * nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "      </Points>\n"));

    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);

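      /* The first queued vector keeps the default label "Unnamed" unless it was
         explicitly named; subsequent vectors are always queried for their object name. */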
      if (((PetscObject)X)->name || link != vtk->link) PetscCall(PetscObjectGetName((PetscObject)X, &vecname));

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </StructuredGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));
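  /* In raw appended mode the binary section starts with a single '_' character;
     the offsets written above are measured from the byte that follows it. */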

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscCall(PetscMalloc2(maxnnodes * PetscMax(3, maxbs), &array, maxnnodes * PetscMax(3, maxbs), &array2));
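  /* array receives (or copies) each rank's nodal data on rank 0; array2 holds the
     transposed coordinates or the extracted single field before it is written. */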
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      PetscCall(VecGetArrayRead(Coords, &coords));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * cdim, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * cdim, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscCall(PetscArraycpy(array, coords, nnodes * cdim));
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k = 0; k < zm; k++) {
          for (j = 0; j < ym; j++) {
            for (i = 0; i < xm; i++) {
              PetscInt Iloc        = i + xm * (j + ym * k);
              array2[Iloc * 3 + 0] = array[Iloc * cdim + 0];
              array2[Iloc * 3 + 1] = cdim > 1 ? array[Iloc * cdim + 1] : 0.0;
              array2[Iloc * 3 + 2] = cdim > 2 ? array[Iloc * cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void *)coords, nnodes * cdim, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(Coords, &coords));
    } else { /* Fabricate some coordinates using grid index */
      for (k = 0; k < zm; k++) {
        for (j = 0; j < ym; j++) {
          for (i = 0; i < xm; i++) {
            PetscInt Iloc        = i + xm * (j + ym * k);
            array2[Iloc * 3 + 0] = xs + i;
            array2[Iloc * 3 + 1] = ys + j;
            array2[Iloc * 3 + 2] = zs + k;
          }
        }
      }
    }
    PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes * 3, MPIU_SCALAR));

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;
      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %" PetscInt_FMT, r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, bs * nnodes, MPIU_SCALAR));
      } else if (r == rank) PetscCallMPI(MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, maxnnodes, maxbs, i, j, k, r;
  PetscInt64               boffset;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\" header_type=\"UInt64\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, rank == 0 ? grloc[0] : NULL, 6, MPIU_INT, 0, comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Coordinates>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += xm * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += ym * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%" PetscInt64_FMT "\" />\n", precision, boffset));
    boffset += zm * sizeof(PetscScalar) + sizeof(PetscInt64);
    PetscCall(PetscFPrintf(comm, fp, "      </Coordinates>\n"));
    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);
      if (((PetscObject)X)->name || link != vtk->link) PetscCall(PetscObjectGetName((PetscObject)X, &vecname));

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt64_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(PetscInt64);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </RectilinearGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscCall(PetscMalloc2(PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array, PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array2));
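  /* The scratch buffers must fit either a full multi-component field (maxnnodes * maxbs)
     or this rank's three 1-D coordinate arrays laid end to end (info.xm + info.ym + info.zm). */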
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }
    { /* Write the coordinates */
      Vec Coords;

      PetscCall(DMGetCoordinates(da, &Coords));
      if (Coords) {
        const PetscScalar *coords;
        PetscCall(VecGetArrayRead(Coords, &coords));
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;
            PetscCallMPI(MPI_Recv(array, xm + ym + zm, MPIU_SCALAR, r, tag, comm, &status));
            PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
            PetscCheck(nn == xm + ym + zm, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
          } else {
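            /* A rectilinear grid stores one coordinate array per axis, so only the
               first pencil of nodes along each axis of the local block is sampled. */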
            /* x coordinates */
            for (j = 0, k = 0, i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[i]      = coords[Iloc * dim + 0];
            }
            /* y coordinates */
            for (i = 0, k = 0, j = 0; j < ym; j++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
            }
            /* z coordinates */
            for (i = 0, j = 0, k = 0; k < zm; k++) {
              PetscInt Iloc      = i + xm * (j + ym * k);
              array[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j = 0, k = 0, i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[i]     = coords[Iloc * dim + 0];
          }
          for (i = 0, k = 0, j = 0; j < ym; j++) {
            PetscInt Iloc  = i + xm * (j + ym * k);
            array2[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
          }
          for (i = 0, j = 0, k = 0; k < zm; k++) {
            PetscInt Iloc       = i + xm * (j + ym * k);
            array2[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
          }
          PetscCallMPI(MPI_Send((void *)array2, xm + ym + zm, MPIU_SCALAR, 0, tag, comm));
        }
        PetscCall(VecRestoreArrayRead(Coords, &coords));
      } else { /* Fabricate some coordinates using grid index */
        for (i = 0; i < xm; i++) array[i] = xs + i;
        for (j = 0; j < ym; j++) array[j + xm] = ys + j;
        for (k = 0; k < zm; k++) array[k + xm + ym] = zs + k;
      }
      if (rank == 0) {
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[0]), xm, MPIU_SCALAR));
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[xm]), ym, MPIU_SCALAR));
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[xm + ym]), zm, MPIU_SCALAR));
      }
    }

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;
      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));

      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %" PetscInt_FMT, r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, nnodes * bs, MPIU_SCALAR));
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

  Collective

  Input Parameters:
+ odm    - `DMDA` specifying the grid layout, passed as a `PetscObject`
- viewer - viewer of type `PETSCVIEWERVTK`

  Level: developer

  Notes:
  This function is a callback used by the `PETSCVIEWERVTK` viewer to actually write the file.
  The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
  Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

  If any fields have been named (see e.g. `DMDASetFieldName()`), then individual scalar
  fields are written. Otherwise, a single multi-dof (vector) field is written.

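  Example Usage:
  A minimal sketch of how this callback is typically reached; the grid sizes, field
  names, and output filename below are illustrative only.
.vb
  DM          da;
  Vec         X;
  PetscViewer viewer;

  PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, 16, 16, PETSC_DECIDE, PETSC_DECIDE, 2, 1, NULL, NULL, &da));
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDASetFieldName(da, 0, "u")); /* naming any field makes the writer emit per-field scalars */
  PetscCall(DMDASetFieldName(da, 1, "v"));
  PetscCall(DMCreateGlobalVector(da, &X));
  /* ... fill X ... */
  PetscCall(PetscViewerVTKOpen(PETSC_COMM_WORLD, "solution.vts", FILE_MODE_WRITE, &viewer));
  PetscCall(VecView(X, viewer));           /* queues X; the file is written when the viewer is destroyed */
  PetscCall(PetscViewerDestroy(&viewer));
  PetscCall(VecDestroy(&X));
  PetscCall(DMDestroy(&da));
.ve
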
.seealso: [](sec_struct), `DMDA`, `DM`, `PETSCVIEWERVTK`, `DMDASetFieldName()`
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm, PetscViewer viewer)
{
  DM        dm = (DM)odm;
  PetscBool isvtk;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
  PetscCheck(isvtk, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_INCOMP, "Cannot use viewer type %s", ((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    PetscCall(DMDAVTKWriteAll_VTS(dm, viewer));
    break;
  case PETSC_VIEWER_VTK_VTR:
    PetscCall(DMDAVTKWriteAll_VTR(dm, viewer));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}