xref: /petsc/src/dm/impls/da/grvtk.c (revision b41ce5d507ea9a58bfa83cf403107a702e77a67d)
1 #include <petsc/private/dmdaimpl.h>
2 #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
3 
/*
   DMDAVTKWriteAll_VTS - Write every vector queued on a VTK viewer into one
   .vts (XML StructuredGrid) file with raw appended binary data.

   Collective on the communicator of da.  Rank 0 performs all file output;
   every other rank sends its local coordinate and field data to rank 0,
   one rank at a time (rendezvous driven by the loop index r below).

   Input Parameters:
+  da     - DMDA describing the grid layout
-  viewer - viewer of type PETSCVIEWERVTK; vtk->link holds the queued vectors

   Note:
   The XML header must record the byte offset (boffset) of every appended
   data block, so the header loop below mirrors the data-writing loop
   exactly; any change to what is written must be made in both places.
*/
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
/* VTK spells the scalar precision out in the DataArray "type" attribute */
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL; /* per-rank local extents; allocated and gathered on rank 0 only */
  PetscScalar              *array,*array2;             /* staging buffers: array receives raw data, array2 holds the transposed/padded form */
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
  if (Coords) {
    /* Infer the number of coordinate components per node (cdim) from the
       global coordinate vector size; it must lie in [dim,3]. */
    PetscInt csize;
    ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim; /* no coordinates attached; fabricate them from grid indices below */
  }

  /* Only rank 0 gets a usable FILE*; the PetscFPrintf/PetscFClose helpers
     are collective and no-ops off-root. */
  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
#if defined(PETSC_WORDS_BIGENDIAN)
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
#else
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
#endif
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  /* Gather every rank's (xs,xm,ys,ym,zs,zm) onto rank 0 so it can size the
     receives and write per-piece extents.
     NOTE(review): grloc is NULL off-root, so &grloc[0][0] is arithmetic on a
     null pointer there; MPI ignores the recv buffer on non-root ranks, but
     the expression itself is formally undefined — consider passing NULL. */
  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
/* NOTE(review): dead code kept from development — the 2-D case passes seven
   values for six %D conversions and its extents look wrong; the live line
   below always prints the 3-D form (presumably unused dimensions have
   extent 1 in a DMDA — TODO confirm). Consider deleting this block. */
#if 0
    switch (dim) {
    case 1:
      ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,0,0,0,0);CHKERRQ(ierr);
      break;
    case 2:
      ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm,ys+ym-1,xs,xs+xm-1,0,0);CHKERRQ(ierr);
      break;
    case 3:
      ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
      break;
    default: SETERRQ1(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"No support for dimension %D",dim);
    }
#endif
    ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    /* each appended block is preceded by an int byte count, hence + sizeof(int) */
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X        = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }
      /* one scalar DataArray per block-size component of each vector */
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
        if (!fieldname) {
          ierr      = PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);CHKERRQ(ierr);
          fieldname = buf;
        }
        ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag; /* viewer's own tag keeps these messages from colliding with other traffic */
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,bs),&array,maxnnodes*3,&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      /* a non-root rank only needs its own node count, and only on its turn to send */
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          /* receive rank r's interleaved (cdim-component) coordinates */
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          ierr = PetscMemcpy(array,coords,nnodes*cdim*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        /* VTK requires exactly 3 components per point; pad missing ones with 0 */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    /* Called by all ranks; presumably only rank 0 (with a valid fp) actually
       writes the length-prefixed block — TODO confirm PetscViewerVTKFWrite
       semantics off-root. */
    ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,PETSC_SCALAR);CHKERRQ(ierr);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr); /* PetscFree(NULL) is a no-op, safe off-root */

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
227 
228 
/*
   DMDAVTKWriteAll_VTR - Write every vector queued on a VTK viewer into one
   .vtr (XML RectilinearGrid) file with raw appended binary data.

   Collective on the communicator of da.  Same rank-0-writes / others-send
   protocol as DMDAVTKWriteAll_VTS, except that a rectilinear grid stores
   one 1-D coordinate array per axis (Xcoord/Ycoord/Zcoord) instead of a
   3-component position per node.

   Input Parameters:
+  da     - DMDA describing the grid layout
-  viewer - viewer of type PETSCVIEWERVTK; vtk->link holds the queued vectors

   Note:
   The header loop and the data-writing loop must stay in lockstep so that
   the boffset values recorded in the XML match the appended blocks.
*/
static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
/* VTK spells the scalar precision out in the DataArray "type" attribute */
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,bs,boffset,maxnnodes,i,j,k,f,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL; /* per-rank local extents; allocated and gathered on rank 0 only */
  PetscScalar              *array,*array2;             /* staging buffers for receives and transposes */
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim, &mx,&my,&mz, 0,0,0, &bs,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
#if defined(PETSC_WORDS_BIGENDIAN)
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
#else
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
#endif
  ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  /* Gather every rank's (xs,xm,ys,ym,zs,zm) onto rank 0.
     NOTE(review): grloc is NULL off-root, so &grloc[0][0] is arithmetic on a
     null pointer there; MPI ignores the recv buffer on non-root ranks, but
     the expression itself is formally undefined — consider passing NULL. */
  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    /* Rectilinear grids store one 1-D coordinate array per axis; each
       appended block is preceded by an int byte count, hence + sizeof(int). */
    ierr     = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X        = (Vec)link->vec;
      const char *vecname = "";
      if (((PetscObject)X)->name || link != vtk->link) { /* If the object is already named, use it. If it is past the first link, name it to disambiguate. */
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }
      /* one scalar DataArray per block-size component of each vector */
      for (i=0; i<bs; i++) {
        char       buf[256];
        const char *fieldname;
        ierr = DMDAGetFieldName(da,i,&fieldname);CHKERRQ(ierr);
        if (!fieldname) {
          ierr      = PetscSNPrintf(buf,sizeof(buf),"Unnamed%D",i);CHKERRQ(ierr);
          fieldname = buf;
        }
        ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
        boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag; /* viewer's own tag keeps these messages from colliding with other traffic */
  /* NOTE(review): on rank 0 the coordinate receive below is xm+ym+zm scalars
     of the REMOTE rank, but this bound mixes maxnnodes*bs with rank 0's own
     info.xm+info.ym+info.zm; for bs==1 and strongly anisotropic partitions
     (e.g. 1-D decompositions where a remote xm+ym+zm exceeds both terms) the
     buffer could be undersized — verify. */
  ierr = PetscMalloc2(PetscMax(maxnnodes*bs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*3,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      /* a non-root rank only needs its own node count, and only on its turn to send */
      nnodes = info.xm*info.ym*info.zm;
    }
    {                           /* Write the coordinates */
      Vec Coords;
      ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
      if (Coords) {
        const PetscScalar *coords;
        ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
        if (!rank) {
          if (r) {
            /* receive the three packed 1-D axis arrays from rank r:
               array[0..xm), array[xm..xm+ym), array[xm+ym..xm+ym+zm) */
            PetscMPIInt nn;
            ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
            ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* rank 0's own coordinates: sample each axis along the first
               row/column/pillar of the interleaved (dim-component) vector */
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          /* sender: pack its own three axis arrays into array2 and ship them */
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
        }
        ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
      } else {       /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      /* only rank 0 writes; one length-prefixed block per axis */
      if (!rank) {
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,PETSC_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,PETSC_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,PETSC_SCALAR);CHKERRQ(ierr);
      }
    }

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        for (f=0; f<bs; f++) {
          /* Extract and transpose the f'th field */
          for (k=0; k<zm; k++) {
            for (j=0; j<ym; j++) {
              for (i=0; i<xm; i++) {
                PetscInt Iloc = i+xm*(j+ym*k);
                array2[Iloc] = array[Iloc*bs + f];
              }
            }
          }
          ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,PETSC_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr); /* PetscFree(NULL) is a no-op, safe off-root */

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
452 
/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Parameters:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type PETSCVIEWERVTK

   Level: developer

   Note:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

.seealso: PETSCVIEWERVTK
@*/
471 PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
472 {
473   DM             dm = (DM)odm;
474   PetscBool      isvtk;
475   PetscErrorCode ierr;
476 
477   PetscFunctionBegin;
478   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
479   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
480   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
481   if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
482   switch (viewer->format) {
483   case PETSC_VIEWER_VTK_VTS:
484     ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
485     break;
486   case PETSC_VIEWER_VTK_VTR:
487     ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
488     break;
489   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
490   }
491   PetscFunctionReturn(0);
492 }
493