xref: /petsc/src/dm/impls/da/grvtk.c (revision 7eda4a2b655d50ced66764fb8992052c2fd6e7d7)
1 #include <petsc/private/dmdaimpl.h>
2 /*
3    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
4    including the private vtkvimpl.h file. The code should be refactored.
5 */
6 #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
7 
8 /* Helper function that determines whether any DMDA fields are named.  This is used
9    as a proxy for the user's intention to use the DMDA fields as distinct
10    scalar-valued fields rather than as a single vector-valued field. */
11 static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
12 {
13   PetscErrorCode ierr;
14   PetscInt       f,bs;
15 
16   PetscFunctionBegin;
17   ierr = DMDAGetDof(da,&bs);CHKERRQ(ierr);
18   *fieldsnamed = PETSC_FALSE;
19   for (f=0; f<bs; ++f) {
20     const char * fieldname;
21     ierr = DMDAGetFieldName(da,f,&fieldname);CHKERRQ(ierr);
22     if (fieldname) {
23       *fieldsnamed = PETSC_TRUE;
24       break;
25     }
26   }
27   PetscFunctionReturn(0);
28 }
29 
30 static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
31 {
32 #if defined(PETSC_USE_REAL_SINGLE)
33   const char precision[] = "Float32";
34 #elif defined(PETSC_USE_REAL_DOUBLE)
35   const char precision[] = "Float64";
36 #else
37   const char precision[] = "UnknownPrecision";
38 #endif
39   MPI_Comm                 comm;
40   Vec                      Coords;
41   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
42   PetscViewerVTKObjectLink link;
43   FILE                     *fp;
44   PetscMPIInt              rank,size,tag;
45   DMDALocalInfo            info;
46   PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
47   PetscInt                 rloc[6],(*grloc)[6] = NULL;
48   PetscScalar              *array,*array2;
49   PetscReal                gmin[3],gmax[3];
50   PetscErrorCode           ierr;
51 
52   PetscFunctionBegin;
53   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
54 #if defined(PETSC_USE_COMPLEX)
55   SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
56 #endif
57   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
58   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
59   ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
60   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
61   ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
62   ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
63   if (Coords) {
64     PetscInt csize;
65     ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
66     if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
67     cdim = csize/(mx*my*mz);
68     if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
69   } else {
70     cdim = dim;
71   }
72 
73   ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
74   ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
75 #if defined(PETSC_WORDS_BIGENDIAN)
76   ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
77 #else
78   ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
79 #endif
80   ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);
81 
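  /* Rank 0 gathers every rank's local corner (xs,ys,zs) and extent (xm,ym,zm) so that it can
     write the per-piece XML headers below and size the receives when each piece's data arrives. */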
82   if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
83   rloc[0] = info.xs;
84   rloc[1] = info.xm;
85   rloc[2] = info.ys;
86   rloc[3] = info.ym;
87   rloc[4] = info.zs;
88   rloc[5] = info.zm;
89   ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);
90 
91   /* Write XML header */
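  /* In the appended "raw" encoding used here, every DataArray payload is preceded by an integer
     byte count, which is why each offset below advances by sizeof(int) on top of the payload size. */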
92   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
93   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
94   boffset   = 0;                /* Offset into binary file */
95   for (r=0; r<size; r++) {
96     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
97     if (!rank) {
98       xs     = grloc[r][0];
99       xm     = grloc[r][1];
100       ys     = grloc[r][2];
101       ym     = grloc[r][3];
102       zs     = grloc[r][4];
103       zm     = grloc[r][5];
104       nnodes = xm*ym*zm;
105     }
106     maxnnodes = PetscMax(maxnnodes,nnodes);
107     ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
108     ierr     = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
109     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
110     boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
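    /* VTK point arrays always carry three components, so 2-D grids get a zero z coordinate
       when the binary coordinate data is written further below. */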
111     ierr     = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);
112 
113     ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
114     for (link=vtk->link; link; link=link->next) {
115       Vec        X = (Vec)link->vec;
116       PetscInt   bs,f;
117       DM         daCurr;
118       PetscBool  fieldsnamed;
119       const char *vecname = "Unnamed";
120 
121       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
122       ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
123       maxbs = PetscMax(maxbs,bs);
124 
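      /* Use the Vec's own name when it has one; any vector after the first falls back to the
         automatically generated object name so that the DataArray names stay distinct. */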
125       if (((PetscObject)X)->name || link != vtk->link) {
126         ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
127       }
128 
129       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
130       ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
131       if (fieldsnamed) {
132         for (f=0; f<bs; f++) {
133           char       buf[256];
134           const char *fieldname;
135           ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
136           if (!fieldname) {
137             ierr      = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
138             fieldname = buf;
139           }
140           ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
141           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
142         }
143       } else {
144         ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
145         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
146       }
147     }
148     ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
149     ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
150   }
151   ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
152   ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
153   ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
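  /* The underscore marks the start of the appended binary block; each PetscViewerVTKFWrite()
     below emits an integer byte count followed by the raw values of one DataArray, matching
     the offsets declared in the header above. */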
154 
155   /* Now write the arrays. */
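  /* All data is funneled through rank 0: for each rank r it either copies its own piece or
     receives rank r's piece, then appends it to the file in the same order as the headers above. */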
156   tag  = ((PetscObject)viewer)->tag;
157   ierr = PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);CHKERRQ(ierr);
158   for (r=0; r<size; r++) {
159     MPI_Status status;
160     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
161     if (!rank) {
162       xs     = grloc[r][0];
163       xm     = grloc[r][1];
164       ys     = grloc[r][2];
165       ym     = grloc[r][3];
166       zs     = grloc[r][4];
167       zm     = grloc[r][5];
168       nnodes = xm*ym*zm;
169     } else if (r == rank) {
170       nnodes = info.xm*info.ym*info.zm;
171     }
172 
173     /* Write the coordinates */
174     if (Coords) {
175       const PetscScalar *coords;
176       ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
177       if (!rank) {
178         if (r) {
179           PetscMPIInt nn;
180           ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
181           ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
182           if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
183         } else {
184           ierr = PetscMemcpy(array,coords,nnodes*cdim*sizeof(PetscScalar));CHKERRQ(ierr);
185         }
186         /* Pad the coordinates out to three components per node; VTK points are always 3-D */
187         for (k=0; k<zm; k++) {
188           for (j=0; j<ym; j++) {
189             for (i=0; i<xm; i++) {
190               PetscInt Iloc = i+xm*(j+ym*k);
191               array2[Iloc*3+0] = array[Iloc*cdim + 0];
192               array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
193               array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
194             }
195           }
196         }
197       } else if (r == rank) {
198         ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
199       }
200       ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
201     } else {       /* Fabricate some coordinates using grid index */
202       for (k=0; k<zm; k++) {
203         for (j=0; j<ym; j++) {
204           for (i=0; i<xm; i++) {
205             PetscInt Iloc = i+xm*(j+ym*k);
206             array2[Iloc*3+0] = xs+i;
207             array2[Iloc*3+1] = ys+j;
208             array2[Iloc*3+2] = zs+k;
209           }
210         }
211       }
212     }
213     ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);CHKERRQ(ierr);
214 
215     /* Write each of the objects queued up for this file */
216     for (link=vtk->link; link; link=link->next) {
217       Vec               X = (Vec)link->vec;
218       const PetscScalar *x;
219       PetscInt          bs,f;
220       DM                daCurr;
221       PetscBool         fieldsnamed;
222       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
223       ierr = DMDAGetInfo(daCurr,0,0,0,0, 0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
224       ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
225       if (!rank) {
226         if (r) {
227           PetscMPIInt nn;
228           ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
229           ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
230           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
231         } else {
232           ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
233         }
234 
235         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
236         ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
237         if (fieldsnamed) {
238           for (f=0; f<bs; f++) {
239             /* Extract and transpose the f'th field */
240             for (k=0; k<zm; k++) {
241               for (j=0; j<ym; j++) {
242                 for (i=0; i<xm; i++) {
243                   PetscInt Iloc = i+xm*(j+ym*k);
244                   array2[Iloc] = array[Iloc*bs + f];
245                 }
246               }
247             }
248             ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
249           }
250         } else {
251           ierr = PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);CHKERRQ(ierr);
252         }
253       } else if (r == rank) {
254         ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
255       }
256       ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
257     }
258   }
259   ierr = PetscFree2(array,array2);CHKERRQ(ierr);
260   ierr = PetscFree(grloc);CHKERRQ(ierr);
261 
262   ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
263   ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
264   ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
265   PetscFunctionReturn(0);
266 }
267 
268 
269 static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
270 {
271 #if defined(PETSC_USE_REAL_SINGLE)
272   const char precision[] = "Float32";
273 #elif defined(PETSC_USE_REAL_DOUBLE)
274   const char precision[] = "Float64";
275 #else
276   const char precision[] = "UnknownPrecision";
277 #endif
278   MPI_Comm                 comm;
279   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
280   PetscViewerVTKObjectLink link;
281   FILE                     *fp;
282   PetscMPIInt              rank,size,tag;
283   DMDALocalInfo            info;
284   PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
285   PetscInt                 rloc[6],(*grloc)[6] = NULL;
286   PetscScalar              *array,*array2;
287   PetscReal                gmin[3],gmax[3];
288   PetscErrorCode           ierr;
289 
290   PetscFunctionBegin;
291   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
292 #if defined(PETSC_USE_COMPLEX)
293   SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
294 #endif
295   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
296   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
297   ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
298   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
299   ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
300   ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
301   ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
302 #if defined(PETSC_WORDS_BIGENDIAN)
303   ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"BigEndian\">\n");CHKERRQ(ierr);
304 #else
305   ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n");CHKERRQ(ierr);
306 #endif
307   ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);
308 
309   if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
310   rloc[0] = info.xs;
311   rloc[1] = info.xm;
312   rloc[2] = info.ys;
313   rloc[3] = info.ym;
314   rloc[4] = info.zs;
315   rloc[5] = info.zm;
316   ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);
317 
318   /* Write XML header */
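  /* A rectilinear grid stores one 1-D coordinate array per axis (Xcoord/Ycoord/Zcoord of lengths
     xm, ym and zm) instead of the full 3-component point array used by the .vts writer above. */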
319   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
320   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
321   boffset   = 0;                /* Offset into binary file */
322   for (r=0; r<size; r++) {
323     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
324     if (!rank) {
325       xs     = grloc[r][0];
326       xm     = grloc[r][1];
327       ys     = grloc[r][2];
328       ym     = grloc[r][3];
329       zs     = grloc[r][4];
330       zm     = grloc[r][5];
331       nnodes = xm*ym*zm;
332     }
333     maxnnodes = PetscMax(maxnnodes,nnodes);
334     ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
335     ierr     = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
336     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
337     boffset += xm*sizeof(PetscScalar) + sizeof(int);
338     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
339     boffset += ym*sizeof(PetscScalar) + sizeof(int);
340     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
341     boffset += zm*sizeof(PetscScalar) + sizeof(int);
342     ierr     = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
343     ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
344     for (link=vtk->link; link; link=link->next) {
345       Vec        X = (Vec)link->vec;
346       PetscInt   bs,f;
347       DM         daCurr;
348       PetscBool  fieldsnamed;
349       const char *vecname = "Unnamed";
350 
351       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
352       ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
353       maxbs = PetscMax(maxbs,bs);
354       if (((PetscObject)X)->name || link != vtk->link) {
355         ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
356       }
357 
358       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
359       ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
360       if (fieldsnamed) {
361         for (f=0; f<bs; f++) {
362           char       buf[256];
363           const char *fieldname;
364           ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
365           if (!fieldname) {
366             ierr      = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
367             fieldname = buf;
368           }
369           ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
370           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
371         }
372       } else {
373         ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
374         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
375       }
376     }
377     ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
378     ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
379   }
380   ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
381   ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
382   ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
383 
384   /* Now write the arrays. */
385   tag  = ((PetscObject)viewer)->tag;
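  /* The scratch buffers must hold either the largest field block (maxnnodes*maxbs entries) or
     this rank's three per-axis coordinate arrays (info.xm+info.ym+info.zm entries). */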
386   ierr = PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
387   for (r=0; r<size; r++) {
388     MPI_Status status;
389     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
390     if (!rank) {
391       xs     = grloc[r][0];
392       xm     = grloc[r][1];
393       ys     = grloc[r][2];
394       ym     = grloc[r][3];
395       zs     = grloc[r][4];
396       zm     = grloc[r][5];
397       nnodes = xm*ym*zm;
398     } else if (r == rank) {
399       nnodes = info.xm*info.ym*info.zm;
400     }
401     {                           /* Write the coordinates */
402       Vec Coords;
403 
404       ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
405       if (Coords) {
406         const PetscScalar *coords;
407         ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
408         if (!rank) {
409           if (r) {
410             PetscMPIInt nn;
411             ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
412             ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
413             if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
414           } else {
415             /* x coordinates */
416             for (j=0, k=0, i=0; i<xm; i++) {
417               PetscInt Iloc = i+xm*(j+ym*k);
418               array[i] = coords[Iloc*dim + 0];
419             }
420             /* y coordinates */
421             for (i=0, k=0, j=0; j<ym; j++) {
422               PetscInt Iloc = i+xm*(j+ym*k);
423               array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
424             }
425             /* z coordinates */
426             for (i=0, j=0, k=0; k<zm; k++) {
427               PetscInt Iloc = i+xm*(j+ym*k);
428               array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
429             }
430           }
431         } else if (r == rank) {
432           xm = info.xm;
433           ym = info.ym;
434           zm = info.zm;
435           for (j=0, k=0, i=0; i<xm; i++) {
436             PetscInt Iloc = i+xm*(j+ym*k);
437             array2[i] = coords[Iloc*dim + 0];
438           }
439           for (i=0, k=0, j=0; j<ym; j++) {
440             PetscInt Iloc = i+xm*(j+ym*k);
441             array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
442           }
443           for (i=0, j=0, k=0; k<zm; k++) {
444             PetscInt Iloc = i+xm*(j+ym*k);
445             array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
446           }
447           ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
448         }
449         ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
450       } else {       /* Fabricate some coordinates using grid index */
451         for (i=0; i<xm; i++) {
452           array[i] = xs+i;
453         }
454         for (j=0; j<ym; j++) {
455           array[j+xm] = ys+j;
456         }
457         for (k=0; k<zm; k++) {
458           array[k+xm+ym] = zs+k;
459         }
460       }
461       if (!rank) {
462         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);CHKERRQ(ierr);
463         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);CHKERRQ(ierr);
464         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);CHKERRQ(ierr);
465       }
466     }
467 
468     /* Write each of the objects queued up for this file */
469     for (link=vtk->link; link; link=link->next) {
470       Vec               X = (Vec)link->vec;
471       const PetscScalar *x;
472       PetscInt          bs,f;
473       DM                daCurr;
474       PetscBool         fieldsnamed;
475       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
476       ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
477 
478       ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
479       if (!rank) {
480         if (r) {
481           PetscMPIInt nn;
482           ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
483           ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
484           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
485         } else {
486           ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
487         }
488         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
489         ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
490         if (fieldsnamed) {
491           for (f=0; f<bs; f++) {
492             /* Extract and transpose the f'th field */
493             for (k=0; k<zm; k++) {
494               for (j=0; j<ym; j++) {
495                 for (i=0; i<xm; i++) {
496                   PetscInt Iloc = i+xm*(j+ym*k);
497                   array2[Iloc] = array[Iloc*bs + f];
498                 }
499               }
500             }
501             ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
502           }
503         } else {
504           ierr = PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);CHKERRQ(ierr);
505         }
506       } else if (r == rank) {
507         ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
508       }
509       ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
510     }
511   }
512   ierr = PetscFree2(array,array2);CHKERRQ(ierr);
513   ierr = PetscFree(grloc);CHKERRQ(ierr);
514 
515   ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
516   ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
517   ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
518   PetscFunctionReturn(0);
519 }
520 
521 /*@C
522    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer
523 
524    Collective
525 
526    Input Parameters:
527 +  odm - DM specifying the grid layout, passed as a PetscObject
528 -  viewer - viewer of type PETSCVIEWERVTK
529 
530    Level: developer
531 
532    Notes:
533    This function is a callback used by the VTK viewer to actually write the file.
534    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
535    Instead, metadata for the entire file needs to be available up-front before you can start writing the file.
536 
537    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
538    fields are written. Otherwise, a single multi-dof (vector) field is written.
539 
540 .seealso: PETSCVIEWERVTK
541 @*/
542 PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
543 {
544   DM             dm = (DM)odm;
545   PetscBool      isvtk;
546   PetscErrorCode ierr;
547 
548   PetscFunctionBegin;
549   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
550   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
551   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
552   if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
553   switch (viewer->format) {
554   case PETSC_VIEWER_VTK_VTS:
555     ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
556     break;
557   case PETSC_VIEWER_VTK_VTR:
558     ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
559     break;
560   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
561   }
562   PetscFunctionReturn(0);
563 }
564