/* xref: /petsc/src/dm/impls/da/grvtk.c (revision af0996ce37bc06907c37d8d91773840993d61e62) */
#include <petsc/private/dmdaimpl.h>
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

#undef __FUNCT__
#define __FUNCT__ "DMDAVTKWriteAll_VTS"
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);

  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
#if defined(PETSC_WORDS_BIGENDIAN)
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
#else
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
#endif
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

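  /* Gather every rank's local extent onto rank 0: rank 0 alone writes the
     per-piece XML metadata and later receives each piece's data in rank
     order, so it needs the full decomposition up front. */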
  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
#if 0
    switch (dim) {
    case 1:
      ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,0,0,0,0);CHKERRQ(ierr);
      break;
    case 2:
      ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,0,0);CHKERRQ(ierr);
      break;
    case 3:
      ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
      break;
    default: SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"No support for dimension %D",dim);
    }
#endif
    ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X        = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
        if (!fieldname) {
          ierr      = PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);CHKERRQ(ierr);
          fieldname = buf;
        }
        ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
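  /* Everything after the "_" marker is raw binary.  Each appended DataArray
     block is an int byte count followed by the values themselves, which is
     why every boffset increment above also adds sizeof(int). */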

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
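  /* array holds one piece's interlaced data (bs components per node, up to 3
     for coordinates); array2 holds the de-interlaced copy that is actually
     written.  Both are sized for the largest piece. */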
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,bs),&array,maxnnodes*3,&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    {                           /* Write the coordinates */
      Vec Coords;
      ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
      if (Coords) {
        const PetscScalar *coords;
        ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            ierr = MPI_Recv(array,nnodes*dim,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
            ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
            if (nn != nnodes*dim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            ierr = PetscMemcpy(array,coords,nnodes*dim*sizeof(PetscScalar));CHKERRQ(ierr);
          }
          /* Transpose coordinates to VTK (C-style) ordering */
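          /* VTS Points arrays always carry 3 components; missing dimensions are zero-padded below */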
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc*3+0] = array[Iloc*dim + 0];
                array2[Iloc*3+1] = dim > 1 ? array[Iloc*dim + 1] : 0;
                array2[Iloc*3+2] = dim > 2 ? array[Iloc*dim + 2] : 0;
              }
            }
          }
        } else if (r == rank) {
          ierr = MPI_Send((void*)coords,nnodes*dim,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
        }
        ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
      } else {       /* Fabricate some coordinates using grid index */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = xs+i;
              array2[Iloc*3+1] = ys+j;
              array2[Iloc*3+2] = zs+k;
            }
          }
        }
      }
      ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,PETSC_SCALAR);CHKERRQ(ierr);
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "DMDAVTKWriteAll_VTR"
static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
#if defined(PETSC_WORDS_BIGENDIAN)
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
#else
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
#endif
  ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X        = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
        if (!fieldname) {
          ierr      = PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);CHKERRQ(ierr);
          fieldname = buf;
        }
        ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
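  /* Same scratch layout as the VTS writer, except a rectilinear piece also
     exchanges three 1-D coordinate arrays, hence the xm+ym+zm terms below. */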
  ierr = PetscMalloc2(PetscMax(maxnnodes*bs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*3,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    {                           /* Write the coordinates */
      Vec Coords;
      ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
      if (Coords) {
        const PetscScalar *coords;
        ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
            ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
        }
        ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
      } else {       /* Fabricate some coordinates using grid index */
        for (j=0, k=0, i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (i=0, k=0, j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (i=0, j=0, k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (!rank) {
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,PETSC_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,PETSC_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,PETSC_SCALAR);CHKERRQ(ierr);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDAVTKWriteAll"
/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Arguments:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Note:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

.seealso: PETSCVIEWERVTK
@*/
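/* A minimal usage sketch (U here is a hypothetical DMDA-attached global Vec):
   the VTK viewer merely queues vectors, and this callback performs all of the
   I/O when the viewer is flushed or destroyed.

     PetscViewer viewer;
     ierr = PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
     ierr = VecView(U,viewer);CHKERRQ(ierr);            queues U, writes nothing yet
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);  the whole file is written here
*/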
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM             dm = (DM)odm;
  PetscBool      isvtk;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
    break;
  case PETSC_VIEWER_VTK_VTR:
    ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(0);
}