xref: /petsc/src/dm/impls/da/grvtk.c (revision fc8a9adeb7fcdc98711d755fa2dc544ddccf0f3e)
1 #include <petsc/private/dmdaimpl.h>
2 /*
3    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
4    including the private vtkvimpl.h file. The code should be refactored.
5 */
6 #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
7 
8 /* Helper function which determines if any DMDA fields are named.  This is used
9    as a proxy for the user's intention to use DMDA fields as distinct
10    scalar-valued fields as opposed to a single vector-valued field */
11 static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
12 {
13   PetscErrorCode ierr;
14   PetscInt       f,bs;
15 
16   PetscFunctionBegin;
17   ierr = DMDAGetDof(da,&bs);CHKERRQ(ierr);
18   *fieldsnamed = PETSC_FALSE;
19   for (f=0; f<bs; ++f) {
20     const char * fieldname;
21     ierr = DMDAGetFieldName(da,f,&fieldname);CHKERRQ(ierr);
22     if (fieldname) {
23       *fieldsnamed = PETSC_TRUE;
24       break;
25     }
26   }
27   PetscFunctionReturn(0);
28 }
29 
/*
   Write every vector queued on the VTK viewer, together with the DMDA
   coordinates, as one .vts (VTK StructuredGrid) XML file with raw appended
   binary data.

   Strategy: rank 0 performs all file I/O.  Each rank's local extents are
   gathered first; the XML header is then written with byte offsets into the
   appended-data block computed up front; finally each rank's coordinate and
   field arrays are received on rank 0 (or copied, for rank 0's own piece) and
   written piece by piece.  The offset accounting in the header pass must match
   exactly what the data pass writes.
*/
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscReal                gmin[3],gmax[3];
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
  if (Coords) {
    PetscInt csize;
    ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    /* cdim = coordinate components per node; may exceed the topological
       dimension (e.g. a 2d DMDA whose nodes live in 3d space) but never 3 */
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  /* Gather every rank's local corner and extent onto rank 0 (grloc is
     allocated only there; the recv buffer is ignored on other ranks) */
  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    /* Each appended array is preceded by a 4-byte (int) length header, hence
       the "+ sizeof(int)" in every boffset increment below */
    ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);

      /* Use the vector's name when it has one; the first (unnamed) vector
         keeps the "Unnamed" label — presumably to avoid PetscObjectGetName()
         generating a default name for it (NOTE(review): confirm intent) */
      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
          if (!fieldname) {
            /* Unnamed component within a partially-named block: fall back to
               the component index as the field name */
            ierr      = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
            fieldname = buf;
          }
          ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  /* array receives/holds a piece's raw data; array2 holds its transposed/
     padded form.  Sized for the largest piece and the widest block size. */
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      /* Non-root ranks only need their own node count, to size the sends */
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          ierr = PetscMemcpy(array,coords,nnodes*cdim*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        /* Transpose coordinates to VTK (C-style) ordering; VTK always wants
           3 components per point, so pad missing dimensions with zeros */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);CHKERRQ(ierr);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,0,0,0,0, 0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
        }

        /* If any fields are named, add scalar fields. Otherwise, add a vector field.
           This must mirror the header pass above so the byte offsets line up. */
        ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
          }
        } else {
          ierr = PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
264 
265 
266 static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
267 {
268   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
269 #if defined(PETSC_USE_REAL_SINGLE)
270   const char precision[] = "Float32";
271 #elif defined(PETSC_USE_REAL_DOUBLE)
272   const char precision[] = "Float64";
273 #else
274   const char precision[] = "UnknownPrecision";
275 #endif
276   MPI_Comm                 comm;
277   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
278   PetscViewerVTKObjectLink link;
279   FILE                     *fp;
280   PetscMPIInt              rank,size,tag;
281   DMDALocalInfo            info;
282   PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
283   PetscInt                 rloc[6],(*grloc)[6] = NULL;
284   PetscScalar              *array,*array2;
285   PetscReal                gmin[3],gmax[3];
286   PetscErrorCode           ierr;
287 
288   PetscFunctionBegin;
289   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
290 #if defined(PETSC_USE_COMPLEX)
291   SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
292 #endif
293   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
294   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
295   ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);
296   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
297   ierr = DMDAGetBoundingBox(da,gmin,gmax);CHKERRQ(ierr);
298   ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
299   ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
300   ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
301   ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);
302 
303   if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
304   rloc[0] = info.xs;
305   rloc[1] = info.xm;
306   rloc[2] = info.ys;
307   rloc[3] = info.ym;
308   rloc[4] = info.zs;
309   rloc[5] = info.zm;
310   ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);
311 
312   /* Write XML header */
313   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
314   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
315   boffset   = 0;                /* Offset into binary file */
316   for (r=0; r<size; r++) {
317     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
318     if (!rank) {
319       xs     = grloc[r][0];
320       xm     = grloc[r][1];
321       ys     = grloc[r][2];
322       ym     = grloc[r][3];
323       zs     = grloc[r][4];
324       zm     = grloc[r][5];
325       nnodes = xm*ym*zm;
326     }
327     maxnnodes = PetscMax(maxnnodes,nnodes);
328     ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
329     ierr     = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
330     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
331     boffset += xm*sizeof(PetscScalar) + sizeof(int);
332     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
333     boffset += ym*sizeof(PetscScalar) + sizeof(int);
334     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
335     boffset += zm*sizeof(PetscScalar) + sizeof(int);
336     ierr     = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
337     ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
338     for (link=vtk->link; link; link=link->next) {
339       Vec        X = (Vec)link->vec;
340       PetscInt   bs,f;
341       DM         daCurr;
342       PetscBool  fieldsnamed;
343       const char *vecname = "Unnamed";
344 
345       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
346       ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
347       maxbs = PetscMax(maxbs,bs);
348       if (((PetscObject)X)->name || link != vtk->link) {
349         ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
350       }
351 
352       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
353       ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
354       if (fieldsnamed) {
355         for (f=0; f<bs; f++) {
356           char       buf[256];
357           const char *fieldname;
358           ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
359           if (!fieldname) {
360             ierr      = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
361             fieldname = buf;
362           }
363           ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
364           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
365         }
366       } else {
367         ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
368         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
369       }
370     }
371     ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
372     ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
373   }
374   ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
375   ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
376   ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
377 
378   /* Now write the arrays. */
379   tag  = ((PetscObject)viewer)->tag;
380   ierr = PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
381   for (r=0; r<size; r++) {
382     MPI_Status status;
383     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
384     if (!rank) {
385       xs     = grloc[r][0];
386       xm     = grloc[r][1];
387       ys     = grloc[r][2];
388       ym     = grloc[r][3];
389       zs     = grloc[r][4];
390       zm     = grloc[r][5];
391       nnodes = xm*ym*zm;
392     } else if (r == rank) {
393       nnodes = info.xm*info.ym*info.zm;
394     }
395     {                           /* Write the coordinates */
396       Vec Coords;
397 
398       ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
399       if (Coords) {
400         const PetscScalar *coords;
401         ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
402         if (!rank) {
403           if (r) {
404             PetscMPIInt nn;
405             ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
406             ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
407             if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
408           } else {
409             /* x coordinates */
410             for (j=0, k=0, i=0; i<xm; i++) {
411               PetscInt Iloc = i+xm*(j+ym*k);
412               array[i] = coords[Iloc*dim + 0];
413             }
414             /* y coordinates */
415             for (i=0, k=0, j=0; j<ym; j++) {
416               PetscInt Iloc = i+xm*(j+ym*k);
417               array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
418             }
419             /* z coordinates */
420             for (i=0, j=0, k=0; k<zm; k++) {
421               PetscInt Iloc = i+xm*(j+ym*k);
422               array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
423             }
424           }
425         } else if (r == rank) {
426           xm = info.xm;
427           ym = info.ym;
428           zm = info.zm;
429           for (j=0, k=0, i=0; i<xm; i++) {
430             PetscInt Iloc = i+xm*(j+ym*k);
431             array2[i] = coords[Iloc*dim + 0];
432           }
433           for (i=0, k=0, j=0; j<ym; j++) {
434             PetscInt Iloc = i+xm*(j+ym*k);
435             array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
436           }
437           for (i=0, j=0, k=0; k<zm; k++) {
438             PetscInt Iloc = i+xm*(j+ym*k);
439             array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
440           }
441           ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
442         }
443         ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
444       } else {       /* Fabricate some coordinates using grid index */
445         for (i=0; i<xm; i++) {
446           array[i] = xs+i;
447         }
448         for (j=0; j<ym; j++) {
449           array[j+xm] = ys+j;
450         }
451         for (k=0; k<zm; k++) {
452           array[k+xm+ym] = zs+k;
453         }
454       }
455       if (!rank) {
456         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);CHKERRQ(ierr);
457         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);CHKERRQ(ierr);
458         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);CHKERRQ(ierr);
459       }
460     }
461 
462     /* Write each of the objects queued up for this file */
463     for (link=vtk->link; link; link=link->next) {
464       Vec               X = (Vec)link->vec;
465       const PetscScalar *x;
466       PetscInt          bs,f;
467       DM                daCurr;
468       PetscBool         fieldsnamed;
469       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
470       ierr = DMDAGetInfo(daCurr,0,0,0,0,0,0,0,&bs,0,0,0,0,0);CHKERRQ(ierr);
471 
472       ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
473       if (!rank) {
474         if (r) {
475           PetscMPIInt nn;
476           ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
477           ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
478           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
479         } else {
480           ierr = PetscMemcpy(array,x,nnodes*bs*sizeof(PetscScalar));CHKERRQ(ierr);
481         }
482         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
483         ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
484         if (fieldsnamed) {
485           for (f=0; f<bs; f++) {
486             /* Extract and transpose the f'th field */
487             for (k=0; k<zm; k++) {
488               for (j=0; j<ym; j++) {
489                 for (i=0; i<xm; i++) {
490                   PetscInt Iloc = i+xm*(j+ym*k);
491                   array2[Iloc] = array[Iloc*bs + f];
492                 }
493               }
494             }
495             ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
496           }
497         }
498         ierr = PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);CHKERRQ(ierr);
499 
500       } else if (r == rank) {
501         ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
502       }
503       ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
504     }
505   }
506   ierr = PetscFree2(array,array2);CHKERRQ(ierr);
507   ierr = PetscFree(grloc);CHKERRQ(ierr);
508 
509   ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
510   ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
511   ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
512   PetscFunctionReturn(0);
513 }
514 
515 /*@C
516    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer
517 
518    Collective
519 
520    Input Parameters:
521    odm - DM specifying the grid layout, passed as a PetscObject
522    viewer - viewer of type VTK
523 
524    Level: developer
525 
526    Notes:
527    This function is a callback used by the VTK viewer to actually write the file.
528    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
529    Instead, metadata for the entire file needs to be available up-front before you can start writing the file.
530 
531    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
532    fields are written. Otherwise, a single multi-dof (vector) field is written.
533 
534 .seealso: PETSCVIEWERVTK
535 @*/
536 PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
537 {
538   DM             dm = (DM)odm;
539   PetscBool      isvtk;
540   PetscErrorCode ierr;
541 
542   PetscFunctionBegin;
543   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
544   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
545   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
546   if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
547   switch (viewer->format) {
548   case PETSC_VIEWER_VTK_VTS:
549     ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
550     break;
551   case PETSC_VIEWER_VTK_VTR:
552     ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
553     break;
554   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
555   }
556   PetscFunctionReturn(0);
557 }
558