#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named.  This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da, PetscBool *fieldsnamed) {
  PetscInt f, bs;

  PetscFunctionBegin;
  *fieldsnamed = PETSC_FALSE;
  PetscCall(DMDAGetDof(da, &bs));
  for (f = 0; f < bs; ++f) {
    const char *fieldname;
    PetscCall(DMDAGetFieldName(da, f, &fieldname));
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  PetscFunctionReturn(0);
}
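
/* Illustration (a sketch; the DMDA "da" and the names below are hypothetical, not part of
   this file): naming any component with DMDASetFieldName() makes DMDAGetFieldsNamed()
   return PETSC_TRUE, so the writers below emit one scalar DataArray per component, e.g.

     PetscCall(DMDASetFieldName(da, 0, "pressure"));
     PetscCall(DMDASetFieldName(da, 1, "temperature"));

   With no names set, a 2-dof Vec is instead written as a single DataArray with
   NumberOfComponents="2". */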

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da, PetscViewer viewer) {
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, cdim, bs, boffset, maxnnodes, maxbs, i, j, k, r;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(DMGetCoordinates(da, &Coords));
  if (Coords) {
    PetscInt csize;
    PetscCall(VecGetSize(Coords, &csize));
    PetscCheck(csize % (mx * my * mz) == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate vector size mismatch");
    cdim = csize / (mx * my * mz);
    PetscCheck(cdim >= dim && cdim <= 3, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Coordinate dimension %" PetscInt_FMT " must be between the DMDA dimension %" PetscInt_FMT " and 3", cdim, dim);
  } else {
    cdim = dim;
  }

  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Points>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
    boffset += 3 * nnodes * sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm, fp, "      </Points>\n"));

    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);

      if (((PetscObject)X)->name || link != vtk->link) { PetscCall(PetscObjectGetName((PetscObject)X, &vecname)); }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </StructuredGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscCall(PetscMalloc2(maxnnodes * PetscMax(3, maxbs), &array, maxnnodes * PetscMax(3, maxbs), &array2));
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      PetscCall(VecGetArrayRead(Coords, &coords));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * cdim, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * cdim, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
        } else PetscCall(PetscArraycpy(array, coords, nnodes * cdim));
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k = 0; k < zm; k++) {
          for (j = 0; j < ym; j++) {
            for (i = 0; i < xm; i++) {
              PetscInt Iloc        = i + xm * (j + ym * k);
              array2[Iloc * 3 + 0] = array[Iloc * cdim + 0];
              array2[Iloc * 3 + 1] = cdim > 1 ? array[Iloc * cdim + 1] : 0.0;
              array2[Iloc * 3 + 2] = cdim > 2 ? array[Iloc * cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void *)coords, nnodes * cdim, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(Coords, &coords));
    } else { /* Fabricate some coordinates using grid index */
      for (k = 0; k < zm; k++) {
        for (j = 0; j < ym; j++) {
          for (i = 0; i < xm; i++) {
            PetscInt Iloc        = i + xm * (j + ym * k);
            array2[Iloc * 3 + 0] = xs + i;
            array2[Iloc * 3 + 1] = ys + j;
            array2[Iloc * 3 + 2] = zs + k;
          }
        }
      }
    }
    PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes * 3, MPIU_SCALAR));

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;
      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %" PetscInt_FMT, r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, bs * nnodes, MPIU_SCALAR));
      } else if (r == rank) PetscCallMPI(MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da, PetscViewer viewer) {
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK         *vtk = (PetscViewer_VTK *)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                    *fp;
  PetscMPIInt              rank, size, tag;
  DMDALocalInfo            info;
  PetscInt                 dim, mx, my, mz, boffset, maxnnodes, maxbs, i, j, k, r;
  PetscInt                 rloc[6], (*grloc)[6] = NULL;
  PetscScalar             *array, *array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
  PetscCheck(!PetscDefined(USE_COMPLEX), PETSC_COMM_SELF, PETSC_ERR_SUP, "Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCall(DMDAGetInfo(da, &dim, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  PetscCall(PetscFOpen(comm, vtk->filename, "wb", &fp));
  PetscCall(PetscFPrintf(comm, fp, "<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm, fp, "<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n", byte_order));
  PetscCall(PetscFPrintf(comm, fp, "  <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n", 0, mx - 1, 0, my - 1, 0, mz - 1));

  if (rank == 0) PetscCall(PetscMalloc1(size * 6, &grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc, 6, MPIU_INT, &grloc[0][0], 6, MPIU_INT, 0, comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r = 0; r < size; r++) {
    PetscInt xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    }
    maxnnodes = PetscMax(maxnnodes, nnodes);
    PetscCall(PetscFPrintf(comm, fp, "    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n", xs, xs + xm - 1, ys, ys + ym - 1, zs, zs + zm - 1));
    PetscCall(PetscFPrintf(comm, fp, "      <Coordinates>\n"));
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
    boffset += xm * sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
    boffset += ym * sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n", precision, boffset));
    boffset += zm * sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm, fp, "      </Coordinates>\n"));
    PetscCall(PetscFPrintf(comm, fp, "      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link = vtk->link; link; link = link->next) {
      Vec         X = (Vec)link->vec;
      PetscInt    bs, f;
      DM          daCurr;
      PetscBool   fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));
      maxbs = PetscMax(maxbs, bs);
      if (((PetscObject)X)->name || link != vtk->link) { PetscCall(PetscObjectGetName((PetscObject)X, &vecname)); }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
      if (fieldsnamed) {
        for (f = 0; f < bs; f++) {
          char        buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr, f, &fieldname));
          if (!fieldname) {
            PetscCall(PetscSNPrintf(buf, sizeof(buf), "%" PetscInt_FMT, f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, fieldname, boffset));
          boffset += nnodes * sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscCall(PetscFPrintf(comm, fp, "        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n", precision, vecname, bs, boffset));
        boffset += bs * nnodes * sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscCall(PetscFPrintf(comm, fp, "      </PointData>\n"));
    PetscCall(PetscFPrintf(comm, fp, "    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm, fp, "  </RectilinearGrid>\n"));
  PetscCall(PetscFPrintf(comm, fp, "  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm, fp, "_"));

  /* Now write the arrays. */
  tag = ((PetscObject)viewer)->tag;
  PetscCall(PetscMalloc2(PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array, PetscMax(maxnnodes * maxbs, info.xm + info.ym + info.zm), &array2));
  for (r = 0; r < size; r++) {
    MPI_Status status;
    PetscInt   xs = -1, xm = -1, ys = -1, ym = -1, zs = -1, zm = -1, nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm * ym * zm;
    } else if (r == rank) {
      nnodes = info.xm * info.ym * info.zm;
    }
    { /* Write the coordinates */
      Vec Coords;

      PetscCall(DMGetCoordinates(da, &Coords));
      if (Coords) {
        const PetscScalar *coords;
        PetscCall(VecGetArrayRead(Coords, &coords));
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;
            PetscCallMPI(MPI_Recv(array, xm + ym + zm, MPIU_SCALAR, r, tag, comm, &status));
            PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
            PetscCheck(nn == xm + ym + zm, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch");
          } else {
            /* x coordinates */
            for (j = 0, k = 0, i = 0; i < xm; i++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[i]      = coords[Iloc * dim + 0];
            }
            /* y coordinates */
            for (i = 0, k = 0, j = 0; j < ym; j++) {
              PetscInt Iloc = i + xm * (j + ym * k);
              array[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
            }
            /* z coordinates */
            for (i = 0, j = 0, k = 0; k < zm; k++) {
              PetscInt Iloc      = i + xm * (j + ym * k);
              array[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j = 0, k = 0, i = 0; i < xm; i++) {
            PetscInt Iloc = i + xm * (j + ym * k);
            array2[i]     = coords[Iloc * dim + 0];
          }
          for (i = 0, k = 0, j = 0; j < ym; j++) {
            PetscInt Iloc  = i + xm * (j + ym * k);
            array2[j + xm] = dim > 1 ? coords[Iloc * dim + 1] : 0;
          }
          for (i = 0, j = 0, k = 0; k < zm; k++) {
            PetscInt Iloc       = i + xm * (j + ym * k);
            array2[k + xm + ym] = dim > 2 ? coords[Iloc * dim + 2] : 0;
          }
          PetscCallMPI(MPI_Send((void *)array2, xm + ym + zm, MPIU_SCALAR, 0, tag, comm));
        }
        PetscCall(VecRestoreArrayRead(Coords, &coords));
      } else { /* Fabricate some coordinates using grid index */
        for (i = 0; i < xm; i++) { array[i] = xs + i; }
        for (j = 0; j < ym; j++) { array[j + xm] = ys + j; }
        for (k = 0; k < zm; k++) { array[k + xm + ym] = zs + k; }
      }
      if (rank == 0) {
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[0]), xm, MPIU_SCALAR));
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[xm]), ym, MPIU_SCALAR));
        PetscCall(PetscViewerVTKFWrite(viewer, fp, &(array[xm + ym]), zm, MPIU_SCALAR));
      }
    }

    /* Write each of the objects queued up for this file */
    for (link = vtk->link; link; link = link->next) {
      Vec                X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt           bs, f;
      DM                 daCurr;
      PetscBool          fieldsnamed;
      PetscCall(VecGetDM(X, &daCurr));
      PetscCall(DMDAGetInfo(daCurr, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &bs, NULL, NULL, NULL, NULL, NULL));

      PetscCall(VecGetArrayRead(X, &x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array, nnodes * bs, MPIU_SCALAR, r, tag, comm, &status));
          PetscCallMPI(MPI_Get_count(&status, MPIU_SCALAR, &nn));
          PetscCheck(nn == nnodes * bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Array size mismatch receiving from rank %" PetscInt_FMT, r);
        } else PetscCall(PetscArraycpy(array, x, nnodes * bs));
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr, &fieldsnamed));
        if (fieldsnamed) {
          for (f = 0; f < bs; f++) {
            /* Extract and transpose the f'th field */
            for (k = 0; k < zm; k++) {
              for (j = 0; j < ym; j++) {
                for (i = 0; i < xm; i++) {
                  PetscInt Iloc = i + xm * (j + ym * k);
                  array2[Iloc]  = array[Iloc * bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer, fp, array2, nnodes, MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer, fp, array, nnodes * bs, MPIU_SCALAR));
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void *)x, nnodes * bs, MPIU_SCALAR, 0, tag, comm));
      }
      PetscCall(VecRestoreArrayRead(X, &x));
    }
  }
  PetscCall(PetscFree2(array, array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm, fp, "\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm, fp, "</VTKFile>\n"));
  PetscCall(PetscFClose(comm, fp));
  PetscFunctionReturn(0);
}

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Parameters:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Notes:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

   If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
   fields are written. Otherwise, a single multi-dof (vector) field is written.

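   Example usage (a sketch; the communicator, file name, and the Vec x are illustrative
   assumptions, not fixed by this API):
.vb
   PetscViewer viewer;
   PetscCall(PetscViewerVTKOpen(PETSC_COMM_WORLD, "solution.vts", FILE_MODE_WRITE, &viewer));
   PetscCall(VecView(x, viewer)); /* queues x on the viewer; this callback writes the file */
   PetscCall(PetscViewerDestroy(&viewer));
.ve
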
.seealso: `PETSCVIEWERVTK`
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm, PetscViewer viewer) {
  DM        dm = (DM)odm;
  PetscBool isvtk;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERVTK, &isvtk));
  PetscCheck(isvtk, PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_INCOMP, "Cannot use viewer type %s", ((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS: PetscCall(DMDAVTKWriteAll_VTS(dm, viewer)); break;
  case PETSC_VIEWER_VTK_VTR: PetscCall(DMDAVTKWriteAll_VTR(dm, viewer)); break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(0);
}
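
/* For reference, a sketch of how this callback is typically reached from the command line
   (option syntax assumed from PETSc's viewer option handling; the program and file names
   are illustrative):

     ./app -vec_view vtk:solution.vts

   The file extension selects the format: .vts yields PETSC_VIEWER_VTK_VTS (handled by
   DMDAVTKWriteAll_VTS) and .vtr yields PETSC_VIEWER_VTK_VTR (DMDAVTKWriteAll_VTR). */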
517