xref: /petsc/src/dm/impls/da/grvtk.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
1 #include <petsc/private/dmdaimpl.h>
2 /*
3    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
4    including the private vtkvimpl.h file. The code should be refactored.
5 */
6 #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
7 
8 /* Helper function which determines if any DMDA fields are named.  This is used
9    as a proxy for the user's intention to use DMDA fields as distinct
10    scalar-valued fields as opposed to a single vector-valued field */
11 static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
12 {
13   PetscInt       f,bs;
14 
15   PetscFunctionBegin;
16   *fieldsnamed = PETSC_FALSE;
17   PetscCall(DMDAGetDof(da,&bs));
18   for (f=0; f<bs; ++f) {
19     const char * fieldname;
20     PetscCall(DMDAGetFieldName(da,f,&fieldname));
21     if (fieldname) {
22       *fieldsnamed = PETSC_TRUE;
23       break;
24     }
25   }
26   PetscFunctionReturn(0);
27 }
28 
/*
   Writes every Vec queued on the VTK viewer into one .vts (StructuredGrid) file.

   Two passes over the ranks are made:
     1. Header pass: writes the XML header.  The byte offset of each appended
        binary array must be declared up front, so boffset is advanced here by
        exactly the sizes that will be written in pass 2.
     2. Data pass: appends the raw binary arrays.  All file output is funneled
        through rank 0, which receives each remote rank's data one rank at a
        time via point-to-point messages.
   The two passes must stay in lockstep: any array written in pass 2 must have
   been accounted for in pass 1, in the same order.
*/
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;   /* per-rank extents, gathered on rank 0 */
  PetscScalar              *array,*array2;               /* array: receive/copy buffer; array2: transposed/padded form that is written */

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da,&comm));
  PetscCheck(!PetscDefined(USE_COMPLEX),PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetCoordinates(da,&Coords));
  if (Coords) {
    PetscInt csize;
    PetscCall(VecGetSize(Coords,&csize));
    PetscCheck(csize % (mx*my*mz) == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);   /* number of coordinate components stored per node */
    PetscCheck(cdim >= dim && cdim <= 3,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;                /* no coordinate vector: coordinates are fabricated from grid indices below */
  }

  PetscCall(PetscFOpen(comm,vtk->filename,"wb",&fp));
  PetscCall(PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order));
  PetscCall(PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n",0,mx-1,0,my-1,0,mz-1));

  /* Gather every rank's local extent (xs,xm,ys,ym,zs,zm) onto rank 0 */
  if (rank == 0) PetscCall(PetscMalloc1(size*6,&grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  PetscCallMPI(MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm));

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      /* Only rank 0 knows the remote extents; other ranks print nothing (PetscFPrintf is collective on rank 0) */
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscCall(PetscFPrintf(comm,fp,"    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1));
    PetscCall(PetscFPrintf(comm,fp,"      <Points>\n"));
    PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,boffset));
    /* sizeof(int) accounts for the per-array byte-count header in the raw appended-data block */
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm,fp,"      </Points>\n"));

    PetscCall(PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n"));
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X,&daCurr));
      PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
      maxbs = PetscMax(maxbs,bs);

      /* Keep "Unnamed" only for an unnamed first vector; subsequent vectors use whatever name they carry */
      if (((PetscObject)X)->name || link != vtk->link) {
        PetscCall(PetscObjectGetName((PetscObject)X,&vecname));
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed));
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr,f,&fieldname));
          if (!fieldname) {
            /* Unnamed component of an otherwise-named vector: use the component index as its name */
            PetscCall(PetscSNPrintf(buf,sizeof(buf),"%" PetscInt_FMT,f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,fieldname,boffset));
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,bs,boffset));
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscCall(PetscFPrintf(comm,fp,"      </PointData>\n"));
    PetscCall(PetscFPrintf(comm,fp,"    </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm,fp,"  </StructuredGrid>\n"));
  PetscCall(PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm,fp,"_"));

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  /* Buffers must hold the largest piece: 3 components for padded coordinates, maxbs for field data */
  PetscCall(PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2));
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      PetscCall(VecGetArrayRead(Coords,&coords));
      if (rank == 0) {
        if (r) {
          /* Pull rank r's coordinate block to rank 0 and verify the expected count arrived */
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status));
          PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn));
          PetscCheck(nn == nnodes*cdim,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          PetscCall(PetscArraycpy(array,coords,nnodes*cdim));
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              /* VTK Points always have 3 components; pad missing dimensions with 0 */
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm));
      }
      PetscCall(VecRestoreArrayRead(Coords,&coords));
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    PetscCall(PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR));

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      PetscCall(VecGetDM(X,&daCurr));
      PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL, NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
      PetscCall(VecGetArrayRead(X,&x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status));
          PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn));
          PetscCheck(nn == nnodes*bs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %" PetscInt_FMT,r);
        } else {
          PetscCall(PetscArraycpy(array,x,nnodes*bs));
        }

        /* If any fields are named, add scalar fields. Otherwise, add a vector field.
           This choice MUST mirror the header pass above or the appended offsets go stale. */
        PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed));
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR));
          }
        } else {
          PetscCall(PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR));
        }
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm));
      }
      PetscCall(VecRestoreArrayRead(X,&x));
    }
  }
  PetscCall(PetscFree2(array,array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm,fp,"\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm,fp,"</VTKFile>\n"));
  PetscCall(PetscFClose(comm,fp));
  PetscFunctionReturn(0);
}
258 
259 static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
260 {
261   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
262 #if defined(PETSC_USE_REAL_SINGLE)
263   const char precision[] = "Float32";
264 #elif defined(PETSC_USE_REAL_DOUBLE)
265   const char precision[] = "Float64";
266 #else
267   const char precision[] = "UnknownPrecision";
268 #endif
269   MPI_Comm                 comm;
270   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
271   PetscViewerVTKObjectLink link;
272   FILE                     *fp;
273   PetscMPIInt              rank,size,tag;
274   DMDALocalInfo            info;
275   PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
276   PetscInt                 rloc[6],(*grloc)[6] = NULL;
277   PetscScalar              *array,*array2;
278 
279   PetscFunctionBegin;
280   PetscCall(PetscObjectGetComm((PetscObject)da,&comm));
281   PetscCheck(!PetscDefined(USE_COMPLEX),PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
282   PetscCallMPI(MPI_Comm_size(comm,&size));
283   PetscCallMPI(MPI_Comm_rank(comm,&rank));
284   PetscCall(DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL));
285   PetscCall(DMDAGetLocalInfo(da,&info));
286   PetscCall(PetscFOpen(comm,vtk->filename,"wb",&fp));
287   PetscCall(PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n"));
288   PetscCall(PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order));
289   PetscCall(PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n",0,mx-1,0,my-1,0,mz-1));
290 
291   if (rank == 0) PetscCall(PetscMalloc1(size*6,&grloc));
292   rloc[0] = info.xs;
293   rloc[1] = info.xm;
294   rloc[2] = info.ys;
295   rloc[3] = info.ym;
296   rloc[4] = info.zs;
297   rloc[5] = info.zm;
298   PetscCallMPI(MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm));
299 
300   /* Write XML header */
301   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
302   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
303   boffset   = 0;                /* Offset into binary file */
304   for (r=0; r<size; r++) {
305     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
306     if (rank == 0) {
307       xs     = grloc[r][0];
308       xm     = grloc[r][1];
309       ys     = grloc[r][2];
310       ym     = grloc[r][3];
311       zs     = grloc[r][4];
312       zm     = grloc[r][5];
313       nnodes = xm*ym*zm;
314     }
315     maxnnodes = PetscMax(maxnnodes,nnodes);
316     PetscCall(PetscFPrintf(comm,fp,"    <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1));
317     PetscCall(PetscFPrintf(comm,fp,"      <Coordinates>\n"));
318     PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n",precision,boffset));
319     boffset += xm*sizeof(PetscScalar) + sizeof(int);
320     PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n",precision,boffset));
321     boffset += ym*sizeof(PetscScalar) + sizeof(int);
322     PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%" PetscInt_FMT "\" />\n",precision,boffset));
323     boffset += zm*sizeof(PetscScalar) + sizeof(int);
324     PetscCall(PetscFPrintf(comm,fp,"      </Coordinates>\n"));
325     PetscCall(PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n"));
326     for (link=vtk->link; link; link=link->next) {
327       Vec        X = (Vec)link->vec;
328       PetscInt   bs,f;
329       DM         daCurr;
330       PetscBool  fieldsnamed;
331       const char *vecname = "Unnamed";
332 
333       PetscCall(VecGetDM(X,&daCurr));
334       PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
335       maxbs = PetscMax(maxbs,bs);
336       if (((PetscObject)X)->name || link != vtk->link) {
337         PetscCall(PetscObjectGetName((PetscObject)X,&vecname));
338       }
339 
340       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
341       PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed));
342       if (fieldsnamed) {
343         for (f=0; f<bs; f++) {
344           char       buf[256];
345           const char *fieldname;
346           PetscCall(DMDAGetFieldName(daCurr,f,&fieldname));
347           if (!fieldname) {
348             PetscCall(PetscSNPrintf(buf,sizeof(buf),"%" PetscInt_FMT,f));
349             fieldname = buf;
350           }
351           PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,fieldname,boffset));
352           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
353         }
354       } else {
355         PetscCall(PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,bs,boffset));
356         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
357       }
358     }
359     PetscCall(PetscFPrintf(comm,fp,"      </PointData>\n"));
360     PetscCall(PetscFPrintf(comm,fp,"    </Piece>\n"));
361   }
362   PetscCall(PetscFPrintf(comm,fp,"  </RectilinearGrid>\n"));
363   PetscCall(PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n"));
364   PetscCall(PetscFPrintf(comm,fp,"_"));
365 
366   /* Now write the arrays. */
367   tag  = ((PetscObject)viewer)->tag;
368   PetscCall(PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2));
369   for (r=0; r<size; r++) {
370     MPI_Status status;
371     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
372     if (rank == 0) {
373       xs     = grloc[r][0];
374       xm     = grloc[r][1];
375       ys     = grloc[r][2];
376       ym     = grloc[r][3];
377       zs     = grloc[r][4];
378       zm     = grloc[r][5];
379       nnodes = xm*ym*zm;
380     } else if (r == rank) {
381       nnodes = info.xm*info.ym*info.zm;
382     }
383     {                           /* Write the coordinates */
384       Vec Coords;
385 
386       PetscCall(DMGetCoordinates(da,&Coords));
387       if (Coords) {
388         const PetscScalar *coords;
389         PetscCall(VecGetArrayRead(Coords,&coords));
390         if (rank == 0) {
391           if (r) {
392             PetscMPIInt nn;
393             PetscCallMPI(MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status));
394             PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn));
395             PetscCheck(nn == xm+ym+zm,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
396           } else {
397             /* x coordinates */
398             for (j=0, k=0, i=0; i<xm; i++) {
399               PetscInt Iloc = i+xm*(j+ym*k);
400               array[i] = coords[Iloc*dim + 0];
401             }
402             /* y coordinates */
403             for (i=0, k=0, j=0; j<ym; j++) {
404               PetscInt Iloc = i+xm*(j+ym*k);
405               array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
406             }
407             /* z coordinates */
408             for (i=0, j=0, k=0; k<zm; k++) {
409               PetscInt Iloc = i+xm*(j+ym*k);
410               array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
411             }
412           }
413         } else if (r == rank) {
414           xm = info.xm;
415           ym = info.ym;
416           zm = info.zm;
417           for (j=0, k=0, i=0; i<xm; i++) {
418             PetscInt Iloc = i+xm*(j+ym*k);
419             array2[i] = coords[Iloc*dim + 0];
420           }
421           for (i=0, k=0, j=0; j<ym; j++) {
422             PetscInt Iloc = i+xm*(j+ym*k);
423             array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
424           }
425           for (i=0, j=0, k=0; k<zm; k++) {
426             PetscInt Iloc = i+xm*(j+ym*k);
427             array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
428           }
429           PetscCallMPI(MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm));
430         }
431         PetscCall(VecRestoreArrayRead(Coords,&coords));
432       } else {       /* Fabricate some coordinates using grid index */
433         for (i=0; i<xm; i++) {
434           array[i] = xs+i;
435         }
436         for (j=0; j<ym; j++) {
437           array[j+xm] = ys+j;
438         }
439         for (k=0; k<zm; k++) {
440           array[k+xm+ym] = zs+k;
441         }
442       }
443       if (rank == 0) {
444         PetscCall(PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR));
445         PetscCall(PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR));
446         PetscCall(PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR));
447       }
448     }
449 
450     /* Write each of the objects queued up for this file */
451     for (link=vtk->link; link; link=link->next) {
452       Vec               X = (Vec)link->vec;
453       const PetscScalar *x;
454       PetscInt          bs,f;
455       DM                daCurr;
456       PetscBool         fieldsnamed;
457       PetscCall(VecGetDM(X,&daCurr));
458       PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
459 
460       PetscCall(VecGetArrayRead(X,&x));
461       if (rank == 0) {
462         if (r) {
463           PetscMPIInt nn;
464           PetscCallMPI(MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status));
465           PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn));
466           PetscCheck(nn == nnodes*bs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %" PetscInt_FMT,r);
467         } else {
468           PetscCall(PetscArraycpy(array,x,nnodes*bs));
469         }
470         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
471         PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed));
472         if (fieldsnamed) {
473           for (f=0; f<bs; f++) {
474             /* Extract and transpose the f'th field */
475             for (k=0; k<zm; k++) {
476               for (j=0; j<ym; j++) {
477                 for (i=0; i<xm; i++) {
478                   PetscInt Iloc = i+xm*(j+ym*k);
479                   array2[Iloc] = array[Iloc*bs + f];
480                 }
481               }
482             }
483             PetscCall(PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR));
484           }
485         }
486         PetscCall(PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR));
487 
488       } else if (r == rank) {
489         PetscCallMPI(MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm));
490       }
491       PetscCall(VecRestoreArrayRead(X,&x));
492     }
493   }
494   PetscCall(PetscFree2(array,array2));
495   PetscCall(PetscFree(grloc));
496 
497   PetscCall(PetscFPrintf(comm,fp,"\n </AppendedData>\n"));
498   PetscCall(PetscFPrintf(comm,fp,"</VTKFile>\n"));
499   PetscCall(PetscFClose(comm,fp));
500   PetscFunctionReturn(0);
501 }
502 
503 /*@C
504    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer
505 
506    Collective
507 
508    Input Parameters:
509 +  odm - DM specifying the grid layout, passed as a PetscObject
510 -  viewer - viewer of type VTK
511 
512    Level: developer
513 
514    Notes:
515    This function is a callback used by the VTK viewer to actually write the file.
516    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
517    Instead, metadata for the entire file needs to be available up-front before you can start writing the file.
518 
519    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
520    fields are written. Otherwise, a single multi-dof (vector) field is written.
521 
522 .seealso: `PETSCVIEWERVTK`
523 @*/
524 PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
525 {
526   DM             dm = (DM)odm;
527   PetscBool      isvtk;
528 
529   PetscFunctionBegin;
530   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
531   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
532   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk));
533   PetscCheck(isvtk,PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
534   switch (viewer->format) {
535   case PETSC_VIEWER_VTK_VTS:
536     PetscCall(DMDAVTKWriteAll_VTS(dm,viewer));
537     break;
538   case PETSC_VIEWER_VTK_VTR:
539     PetscCall(DMDAVTKWriteAll_VTR(dm,viewer));
540     break;
541   default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
542   }
543   PetscFunctionReturn(0);
544 }
545