xref: /petsc/src/dm/impls/da/grvtk.c (revision dce8aeba1c9b69b19f651c53d8a6b674bd7e9cbd)
1 #include <petsc/private/dmdaimpl.h>
2 /*
3    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
4    including the private vtkvimpl.h file. The code should be refactored.
5 */
6 #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
7 
8 /* Helper function which determines if any DMDA fields are named.  This is used
9    as a proxy for the user's intention to use DMDA fields as distinct
10    scalar-valued fields as opposed to a single vector-valued field */
11 static PetscErrorCode DMDAGetFieldsNamed(DM da, PetscBool *fieldsnamed)
12 {
13   PetscInt f, bs;
14 
15   PetscFunctionBegin;
16   *fieldsnamed = PETSC_FALSE;
17   PetscCall(DMDAGetDof(da, &bs));
18   for (f = 0; f < bs; ++f) {
19     const char *fieldname;
20     PetscCall(DMDAGetFieldName(da, f, &fieldname));
21     if (fieldname) {
22       *fieldsnamed = PETSC_TRUE;
23       break;
24     }
25   }
26   PetscFunctionReturn(0);
27 }
28 
/* Write every vector queued on the viewer into a single .vts (StructuredGrid)
   VTK XML file with raw appended binary data.

   Strategy: rank 0 gathers each rank's local extents, writes the complete XML
   header up front (computing the byte offset of every appended data array),
   then loops over ranks again, receiving coordinates and field data from each
   rank in turn and streaming them into the appended section.  Non-root ranks
   only send.  The offsets written in the header must therefore match exactly
   the sizes written later. */
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da, PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, cdim, bs, boffset, maxnnodes, maxbs, i, j, k, r;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetCoordinates(da, &Coords));
  if (Coords) {
    /* Infer the number of coordinate components per node from the global
       coordinate vector size; it may exceed the topological dimension. */
    PetscInt csize;
    PetscCall(VecGetSize(Coords, &csize));
    PetscCheck(csize % (mx * my * mz) == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
    cdim = csize / (mx * my * mz);
    PetscCheck(cdim >= dim && cdim <= 3, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  /* Gather every rank's local extents (xs,xm,ys,ym,zs,zm) onto rank 0 */
  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    /* Only rank 0's output reaches the file (PetscFPrintf), so only rank 0
       needs valid extents; the offsets below accumulate each piece's size. */
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Points>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
    /* Each appended array is preceded by a 4-byte length record, hence +sizeof(int) */
    boffset += 3 * nnodes * sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm, fp, "      </Points>\n"));

    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);

      /* The first (unnamed) vector keeps the "Unnamed" default */
      if (((PetscObject)X)->name || link != vtk->link) PetscCall(PetscObjectGetName((PetscObject)X, &vecname));

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            /* Unnamed components fall back to their numeric index */
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </StructuredGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  /* array receives raw rank data; array2 holds the transposed/padded copy.
     Size covers the largest piece times max(3 coordinate components, max bs). */
  PetscCall(PetscMalloc2(maxnnodes * PetscMax(3, maxbs), &array, maxnnodes * PetscMax(3, maxbs), &array2));
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      PetscCall(VecGetArrayRead(Coords, &coords));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * cdim, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * cdim, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscCall(PetscArraycpy(array, coords, nnodes * cdim));
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k = 0; k < zm; k++) {
          for (j = 0; j < ym; j++) {
            for (i = 0; i < xm; i++) {
              PetscInt Iloc        = i + xm * (j + ym * k);
              array2[Iloc * 3 + 0] = array[Iloc * cdim + 0];
              /* VTK always wants 3 components; pad missing dimensions with 0 */
              array2[Iloc * 3 + 1] = cdim > 1 ? array[Iloc * cdim + 1] : 0.0;
              array2[Iloc * 3 + 2] = cdim > 2 ? array[Iloc * cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void *)coords, nnodes * cdim, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(Coords, &coords));
    } else { /* Fabricate some coordinates using grid index */
      for (k = 0; k < zm; k++) {
        for (j = 0; j < ym; j++) {
          for (i = 0; i < xm; i++) {
            PetscInt Iloc        = i + xm * (j + ym * k);
            array2[Iloc * 3 + 0] = xs + i;
            array2[Iloc * 3 + 1] = ys + j;
            array2[Iloc * 3 + 2] = zs + k;
          }
        }
      }
    }
    /* Collective call: only rank 0 actually writes (others pass through) */
    PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes * 3, MPIU_SCALAR));

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;
      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %" PetscInt_FMT, r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));

        /* If any fields are named, add scalar fields. Otherwise, add a vector field.
           Must mirror the header loop above so the data matches the offsets. */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, bs * nnodes, MPIU_SCALAR));
      } else if (r == rank) PetscCallMPI(MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(0);
}
248 
249 static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer)
250 {
251   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
252 #if defined(PETSC_USE_REAL_SINGLE)
253   const char precision[] = "Float32";
254 #elif defined(PETSC_USE_REAL_DOUBLE)
255   const char precision[] = "Float64";
256 #else
257   const char precision[] = "UnknownPrecision";
258 #endif
259   MPI_Comm                 comm;
260   PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
261   PetscViewerVTKObjectLink link;
262   FILE                    *fp;
263   PetscMPIInt              rank, size, tag;
264   DMDALocalInfo            info;
265   PetscInt                 dim, mx, my, mz, boffset, maxnnodes, maxbs, i, j, k, r;
266   PetscInt                 rloc[6], (*grloc)[6] = NULL;
267   PetscScalar             *array, *array2;
268 
269   PetscFunctionBegin;
270   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
271   PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
272   PetscCallMPI(MPI_Comm_size(comm, &size));
273   PetscCallMPI(MPI_Comm_rank(comm, &rank));
274   PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
275   PetscCall(DMDAGetLocalInfo(da, &info));
276   PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
277   PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
278   PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order));
279   PetscCall(PetscFPrintf(comm, fp, "  <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));
280 
281   if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
282   rloc[0] = info.xs;
283   rloc[1] = info.xm;
284   rloc[2] = info.ys;
285   rloc[3] = info.ym;
286   rloc[4] = info.zs;
287   rloc[5] = info.zm;
288   PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm));
289 
290   /* Write XML header */
291   maxnnodes = 0; /* Used for the temporary array size on rank 0 */
292   maxbs     = 0; /* Used for the temporary array size on rank 0 */
293   boffset   = 0; /* Offset into binary file */
294   for (r = 0; r < size; r++) {
295     PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
296     if (rank == 0) {
297       xs     = grloc[r][0];
298       xm     = grloc[r][1];
299       ys     = grloc[r][2];
300       ym     = grloc[r][3];
301       zs     = grloc[r][4];
302       zm     = grloc[r][5];
303       nnodes = xm * ym * zm;
304     }
305     maxnnodes = PetscMax(maxnnodes, nnodes);
306     PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
307     PetscCall(PetscFPrintf(comm, fp, "      <Coordinates>\n"));
308     PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
309     boffset += xm * sizeof(PetscScalar) + sizeof(int);
310     PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
311     boffset += ym * sizeof(PetscScalar) + sizeof(int);
312     PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
313     boffset += zm * sizeof(PetscScalar) + sizeof(int);
314     PetscCall(PetscFPrintf(comm, fp, "      </Coordinates>\n"));
315     PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
316     for (link = vtk->link; link; link = link->next) {
317       Vec         X = (Vec)link->vec;
318       PetscInt    bs, f;
319       DM          daCurr;
320       PetscBool   fieldsnamed;
321       const char *vecname = "Unnamed";
322 
323       PetscCall(VecGetDM(X, &daCurr));
324       PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
325       maxbs = PetscMax(maxbs, bs);
326       if (((PetscObject)X)->name || link != vtk->link) PetscCall(PetscObjectGetName((PetscObject)X, &vecname));
327 
328       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
329       PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
330       if (fieldsnamed) {
331         for (f = 0; f < bs; f++) {
332           char        buf[256];
333           const char *fieldname;
334           PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
335           if (!fieldname) {
336             PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
337             fieldname = buf;
338           }
339           PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, fieldname, boffset));
340           boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
341         }
342       } else {
343         PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, bs, boffset));
344         boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
345       }
346     }
347     PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
348     PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
349   }
350   PetscCall(PetscFPrintf(comm, fp, "  </RectilinearGrid>\n"));
351   PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
352   PetscCall(PetscFPrintf(comm, fp, "_"));
353 
354   /* Now write the arrays. */
355   tag = ((PetscObject)viewer)->tag;
356   PetscCall(PetscMalloc2(PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array, PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array2));
357   for (r = 0; r < size; r++) {
358     MPI_Status status;
359     PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
360     if (rank == 0) {
361       xs     = grloc[r][0];
362       xm     = grloc[r][1];
363       ys     = grloc[r][2];
364       ym     = grloc[r][3];
365       zs     = grloc[r][4];
366       zm     = grloc[r][5];
367       nnodes = xm * ym * zm;
368     } else if (r == rank) {
369       nnodes = info.xm * info.ym * info.zm;
370     }
371     { /* Write the coordinates */
372       Vec Coords;
373 
374       PetscCall(DMGetCoordinates(da, &Coords));
375       if (Coords) {
376         const PetscScalar *coords;
377         PetscCall(VecGetArrayRead(Coords, &coords));
378         if (rank == 0) {
379           if (r) {
380             PetscMPIInt nn;
381             PetscCallMPI(MPI_Recv(array, xm + ym + zm, MPIU_SCALAR, r, tag, comm, &status));
382             PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
383             PetscCheck(nn == xm + ym + zm, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
384           } else {
385             /* x coordinates */
386             for (j = 0, k = 0, i = 0; i < xm; i++) {
387               PetscInt Iloc = i + xm * (j + ym * k);
388               array[i]      = coords[Iloc * dim + 0];
389             }
390             /* y coordinates */
391             for (i = 0, k = 0, j = 0; j < ym; j++) {
392               PetscInt Iloc = i + xm * (j + ym * k);
393               array[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
394             }
395             /* z coordinates */
396             for (i = 0, j = 0, k = 0; k < zm; k++) {
397               PetscInt Iloc      = i + xm * (j + ym * k);
398               array[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
399             }
400           }
401         } else if (r == rank) {
402           xm = info.xm;
403           ym = info.ym;
404           zm = info.zm;
405           for (j = 0, k = 0, i = 0; i < xm; i++) {
406             PetscInt Iloc = i + xm * (j + ym * k);
407             array2[i]     = coords[Iloc * dim + 0];
408           }
409           for (i = 0, k = 0, j = 0; j < ym; j++) {
410             PetscInt Iloc  = i + xm * (j + ym * k);
411             array2[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
412           }
413           for (i = 0, j = 0, k = 0; k < zm; k++) {
414             PetscInt Iloc       = i + xm * (j + ym * k);
415             array2[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
416           }
417           PetscCallMPI(MPI_Send((void *)array2, xm + ym + zm, MPIU_SCALAR, 0, tag, comm));
418         }
419         PetscCall(VecRestoreArrayRead(Coords, &coords));
420       } else { /* Fabricate some coordinates using grid index */
421         for (i = 0; i < xm; i++) array[i] = xs + i;
422         for (j = 0; j < ym; j++) array[j + xm] = ys + j;
423         for (k = 0; k < zm; k++) array[k + xm + ym] = zs + k;
424       }
425       if (rank == 0) {
426         PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[0]), xm, MPIU_SCALAR));
427         PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[xm]), ym, MPIU_SCALAR));
428         PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[xm + ym]), zm, MPIU_SCALAR));
429       }
430     }
431 
432     /* Write each of the objects queued up for this file */
433     for (link = vtk->link; link; link = link->next) {
434       Vec                X = (Vec)link->vec;
435       const PetscScalar *x;
436       PetscInt           bs, f;
437       DM                 daCurr;
438       PetscBool          fieldsnamed;
439       PetscCall(VecGetDM(X, &daCurr));
440       PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
441 
442       PetscCall(VecGetArrayRead(X, &x));
443       if (rank == 0) {
444         if (r) {
445           PetscMPIInt nn;
446           PetscCallMPI(MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
447           PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
448           PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %" PetscInt_FMT, r);
449         } else PetscCall(PetscArraycpy(array, x, nnodes * bs));
450         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
451         PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
452         if (fieldsnamed) {
453           for (f = 0; f < bs; f++) {
454             /* Extract and transpose the f'th field */
455             for (k = 0; k < zm; k++) {
456               for (j = 0; j < ym; j++) {
457                 for (i = 0; i < xm; i++) {
458                   PetscInt Iloc = i + xm * (j + ym * k);
459                   array2[Iloc]  = array[Iloc * bs + f];
460                 }
461               }
462             }
463             PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
464           }
465         }
466         PetscCall(PetscViewerVTKFWrite(viewer, fp, array, nnodes * bs, MPIU_SCALAR));
467 
468       } else if (r == rank) {
469         PetscCallMPI(MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
470       }
471       PetscCall(VecRestoreArrayRead(X, &x));
472     }
473   }
474   PetscCall(PetscFree2(array, array2));
475   PetscCall(PetscFree(grloc));
476 
477   PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
478   PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
479   PetscCall(PetscFClose(comm, fp));
480   PetscFunctionReturn(0);
481 }
482 
483 /*@C
484    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer
485 
486    Collective
487 
488    Input Parameters:
489 +  odm - `DMDA` specifying the grid layout, passed as a `PetscObject`
490 -  viewer - viewer of type `PETSCVIEWERVTK`
491 
492    Level: developer
493 
494    Notes:
495    This function is a callback used by the VTK viewer to actually write the file.
496    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
497    Instead, metadata for the entire file needs to be available up-front before you can start writing the file.
498 
499    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
500    fields are written. Otherwise, a single multi-dof (vector) field is written.
501 
502 .seealso: `DMDA`, `DM`, `PETSCVIEWERVTK`
503 @*/
504 PetscErrorCode DMDAVTKWriteAll(PetscObject odm, PetscViewer viewer)
505 {
506   DM        dm = (DM)odm;
507   PetscBool isvtk;
508 
509   PetscFunctionBegin;
510   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
511   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
512   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
513   PetscCheck(isvtk, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_INCOMP, "Cannot use viewer type %s", ((PetscObject)viewer)->type_name);
514   switch (viewer->format) {
515   case PETSC_VIEWER_VTK_VTS:
516     PetscCall(DMDAVTKWriteAll_VTS(dm, viewer));
517     break;
518   case PETSC_VIEWER_VTK_VTR:
519     PetscCall(DMDAVTKWriteAll_VTR(dm, viewer));
520     break;
521   default:
522     SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
523   }
524   PetscFunctionReturn(0);
525 }
526