xref: /petsc/src/dm/impls/da/grvtk.c (revision 2da392cc7c10228af19ad9843ce5155178acb644)
1 #include <petsc/private/dmdaimpl.h>
2 /*
3    Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
4    including the private vtkvimpl.h file. The code should be refactored.
5 */
6 #include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>
7 
8 /* Helper function which determines if any DMDA fields are named.  This is used
9    as a proxy for the user's intention to use DMDA fields as distinct
10    scalar-valued fields as opposed to a single vector-valued field */
11 static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
12 {
13   PetscErrorCode ierr;
14   PetscInt       f,bs;
15 
16   PetscFunctionBegin;
17   *fieldsnamed = PETSC_FALSE;
18   ierr = DMDAGetDof(da,&bs);CHKERRQ(ierr);
19   for (f=0; f<bs; ++f) {
20     const char * fieldname;
21     ierr = DMDAGetFieldName(da,f,&fieldname);CHKERRQ(ierr);
22     if (fieldname) {
23       *fieldsnamed = PETSC_TRUE;
24       break;
25     }
26   }
27   PetscFunctionReturn(0);
28 }
29 
/* Write all vectors queued on the viewer, together with the DMDA's coordinates,
   as a single .vts (VTK XML StructuredGrid) file with raw appended binary data.

   The file is produced in two passes:
     1. Header pass: rank 0 learns every rank's local extent via MPI_Gather,
        writes one <Piece> per rank, and accumulates the byte offset of each
        appended data array as it goes.
     2. Data pass: for each rank in turn, the data is shipped to rank 0, which
        transposes/pads it into VTK ordering and appends it to the file.
   The offsets computed in pass 1 must match exactly what pass 2 writes. */
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL; /* per-rank {xs,xm,ys,ym,zs,zm}; allocated on rank 0 only */
  PetscScalar              *array,*array2;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
#if defined(PETSC_USE_COMPLEX)
  SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
#endif
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
  if (Coords) {
    PetscInt csize;
    ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    /* cdim = coordinate components per node; may exceed the topological
       dimension dim, but never 3 (VTK points always carry 3 components,
       padded with zeros below) */
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  /* Gather every rank's local corner and extent onto rank 0 */
  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr     = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    /* Each appended array is preceded by its byte count (an int), hence the
       extra sizeof(int) in every offset increment */
    ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr     = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);

      /* Only the first queued vector may stay "Unnamed" when it has no explicit
         name; later vectors always take their object name (presumably so a
         generated default name is acceptable for them — NOTE(review): confirm) */
      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
          if (!fieldname) {
            /* Unnamed component: fall back to its index as the field name */
            ierr      = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
            fieldname = buf;
          }
          ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  /* Scratch space sized for the largest piece: 3 components for point
     coordinates or maxbs components for field data */
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      /* On a sending rank, nnodes is only needed to size the outgoing message */
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          ierr = PetscArraycpy(array,coords,nnodes*cdim);CHKERRQ(ierr);
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              /* Pad missing components with zero: VTK points are always 3d */
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
    } else {       /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    /* nnodes is 0 on ranks that are neither writer nor sender, so this is a no-op there */
    ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);CHKERRQ(ierr);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL, NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscArraycpy(array,x,nnodes*bs);CHKERRQ(ierr);
        }

        /* If any fields are named, add scalar fields. Otherwise, add a vector field.
           Either way this must mirror the DataArray declarations of the header pass. */
        ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
          }
        } else {
          ierr = PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
262 
263 
264 static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
265 {
266   const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
267 #if defined(PETSC_USE_REAL_SINGLE)
268   const char precision[] = "Float32";
269 #elif defined(PETSC_USE_REAL_DOUBLE)
270   const char precision[] = "Float64";
271 #else
272   const char precision[] = "UnknownPrecision";
273 #endif
274   MPI_Comm                 comm;
275   PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
276   PetscViewerVTKObjectLink link;
277   FILE                     *fp;
278   PetscMPIInt              rank,size,tag;
279   DMDALocalInfo            info;
280   PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
281   PetscInt                 rloc[6],(*grloc)[6] = NULL;
282   PetscScalar              *array,*array2;
283   PetscErrorCode           ierr;
284 
285   PetscFunctionBegin;
286   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
287 #if defined(PETSC_USE_COMPLEX)
288   SETERRQ(comm,PETSC_ERR_SUP,"Complex values not supported");
289 #endif
290   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
291   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
292   ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
293   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
294   ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
295   ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
296   ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
297   ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);
298 
299   if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
300   rloc[0] = info.xs;
301   rloc[1] = info.xm;
302   rloc[2] = info.ys;
303   rloc[3] = info.ym;
304   rloc[4] = info.zs;
305   rloc[5] = info.zm;
306   ierr    = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRQ(ierr);
307 
308   /* Write XML header */
309   maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
310   maxbs     = 0;                /* Used for the temporary array size on rank 0 */
311   boffset   = 0;                /* Offset into binary file */
312   for (r=0; r<size; r++) {
313     PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
314     if (!rank) {
315       xs     = grloc[r][0];
316       xm     = grloc[r][1];
317       ys     = grloc[r][2];
318       ym     = grloc[r][3];
319       zs     = grloc[r][4];
320       zm     = grloc[r][5];
321       nnodes = xm*ym*zm;
322     }
323     maxnnodes = PetscMax(maxnnodes,nnodes);
324     ierr     = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
325     ierr     = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
326     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
327     boffset += xm*sizeof(PetscScalar) + sizeof(int);
328     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
329     boffset += ym*sizeof(PetscScalar) + sizeof(int);
330     ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\"  format=\"appended\"  offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
331     boffset += zm*sizeof(PetscScalar) + sizeof(int);
332     ierr     = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
333     ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
334     for (link=vtk->link; link; link=link->next) {
335       Vec        X = (Vec)link->vec;
336       PetscInt   bs,f;
337       DM         daCurr;
338       PetscBool  fieldsnamed;
339       const char *vecname = "Unnamed";
340 
341       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
342       ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
343       maxbs = PetscMax(maxbs,bs);
344       if (((PetscObject)X)->name || link != vtk->link) {
345         ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
346       }
347 
348       /* If any fields are named, add scalar fields. Otherwise, add a vector field */
349       ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
350       if (fieldsnamed) {
351         for (f=0; f<bs; f++) {
352           char       buf[256];
353           const char *fieldname;
354           ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
355           if (!fieldname) {
356             ierr      = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
357             fieldname = buf;
358           }
359           ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
360           boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
361         }
362       } else {
363         ierr     = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
364         boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
365       }
366     }
367     ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
368     ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
369   }
370   ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
371   ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
372   ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
373 
374   /* Now write the arrays. */
375   tag  = ((PetscObject)viewer)->tag;
376   ierr = PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
377   for (r=0; r<size; r++) {
378     MPI_Status status;
379     PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
380     if (!rank) {
381       xs     = grloc[r][0];
382       xm     = grloc[r][1];
383       ys     = grloc[r][2];
384       ym     = grloc[r][3];
385       zs     = grloc[r][4];
386       zm     = grloc[r][5];
387       nnodes = xm*ym*zm;
388     } else if (r == rank) {
389       nnodes = info.xm*info.ym*info.zm;
390     }
391     {                           /* Write the coordinates */
392       Vec Coords;
393 
394       ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
395       if (Coords) {
396         const PetscScalar *coords;
397         ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
398         if (!rank) {
399           if (r) {
400             PetscMPIInt nn;
401             ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
402             ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
403             if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
404           } else {
405             /* x coordinates */
406             for (j=0, k=0, i=0; i<xm; i++) {
407               PetscInt Iloc = i+xm*(j+ym*k);
408               array[i] = coords[Iloc*dim + 0];
409             }
410             /* y coordinates */
411             for (i=0, k=0, j=0; j<ym; j++) {
412               PetscInt Iloc = i+xm*(j+ym*k);
413               array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
414             }
415             /* z coordinates */
416             for (i=0, j=0, k=0; k<zm; k++) {
417               PetscInt Iloc = i+xm*(j+ym*k);
418               array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
419             }
420           }
421         } else if (r == rank) {
422           xm = info.xm;
423           ym = info.ym;
424           zm = info.zm;
425           for (j=0, k=0, i=0; i<xm; i++) {
426             PetscInt Iloc = i+xm*(j+ym*k);
427             array2[i] = coords[Iloc*dim + 0];
428           }
429           for (i=0, k=0, j=0; j<ym; j++) {
430             PetscInt Iloc = i+xm*(j+ym*k);
431             array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
432           }
433           for (i=0, j=0, k=0; k<zm; k++) {
434             PetscInt Iloc = i+xm*(j+ym*k);
435             array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
436           }
437           ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
438         }
439         ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
440       } else {       /* Fabricate some coordinates using grid index */
441         for (i=0; i<xm; i++) {
442           array[i] = xs+i;
443         }
444         for (j=0; j<ym; j++) {
445           array[j+xm] = ys+j;
446         }
447         for (k=0; k<zm; k++) {
448           array[k+xm+ym] = zs+k;
449         }
450       }
451       if (!rank) {
452         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);CHKERRQ(ierr);
453         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);CHKERRQ(ierr);
454         ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);CHKERRQ(ierr);
455       }
456     }
457 
458     /* Write each of the objects queued up for this file */
459     for (link=vtk->link; link; link=link->next) {
460       Vec               X = (Vec)link->vec;
461       const PetscScalar *x;
462       PetscInt          bs,f;
463       DM                daCurr;
464       PetscBool         fieldsnamed;
465       ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
466       ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
467 
468       ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
469       if (!rank) {
470         if (r) {
471           PetscMPIInt nn;
472           ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRQ(ierr);
473           ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRQ(ierr);
474           if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
475         } else {
476           ierr = PetscArraycpy(array,x,nnodes*bs);CHKERRQ(ierr);
477         }
478         /* If any fields are named, add scalar fields. Otherwise, add a vector field */
479         ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
480         if (fieldsnamed) {
481           for (f=0; f<bs; f++) {
482             /* Extract and transpose the f'th field */
483             for (k=0; k<zm; k++) {
484               for (j=0; j<ym; j++) {
485                 for (i=0; i<xm; i++) {
486                   PetscInt Iloc = i+xm*(j+ym*k);
487                   array2[Iloc] = array[Iloc*bs + f];
488                 }
489               }
490             }
491             ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
492           }
493         }
494         ierr = PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);CHKERRQ(ierr);
495 
496       } else if (r == rank) {
497         ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRQ(ierr);
498       }
499       ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
500     }
501   }
502   ierr = PetscFree2(array,array2);CHKERRQ(ierr);
503   ierr = PetscFree(grloc);CHKERRQ(ierr);
504 
505   ierr = PetscFPrintf(comm,fp,"\n </AppendedData>\n");CHKERRQ(ierr);
506   ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
507   ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 /*@C
512    DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer
513 
514    Collective
515 
516    Input Parameters:
517    odm - DM specifying the grid layout, passed as a PetscObject
518    viewer - viewer of type VTK
519 
520    Level: developer
521 
522    Notes:
523    This function is a callback used by the VTK viewer to actually write the file.
524    The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
525    Instead, metadata for the entire file needs to be available up-front before you can start writing the file.
526 
527    If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
528    fields are written. Otherwise, a single multi-dof (vector) field is written.
529 
530 .seealso: PETSCVIEWERVTK
531 @*/
532 PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
533 {
534   DM             dm = (DM)odm;
535   PetscBool      isvtk;
536   PetscErrorCode ierr;
537 
538   PetscFunctionBegin;
539   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
540   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
541   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
542   if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
543   switch (viewer->format) {
544   case PETSC_VIEWER_VTK_VTS:
545     ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
546     break;
547   case PETSC_VIEWER_VTK_VTR:
548     ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
549     break;
550   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
551   }
552   PetscFunctionReturn(0);
553 }
554