#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named. This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
{
  PetscErrorCode ierr;
  PetscInt       f,bs;

  PetscFunctionBegin;
  *fieldsnamed = PETSC_FALSE;
  ierr = DMDAGetDof(da,&bs);CHKERRQ(ierr);
  for (f=0; f<bs; ++f) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,f,&fieldname);CHKERRQ(ierr);
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
  if (Coords) {
    PetscInt csize;
    ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRMPI(ierr);
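
  /* The file is written in two passes over the ranks: the first pass emits the
     XML header, in which each DataArray records the byte offset (boffset) at
     which its data will appear in the appended binary block; the second pass
     gathers the data to rank 0, which performs all of the file I/O. */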
  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);

      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
          if (!fieldname) {
            ierr = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
            fieldname = buf;
          }
          ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
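
  /* Everything after the underscore is raw binary data: each array is written
     by PetscViewerVTKFWrite() preceded by its size in bytes (an int), which is
     why the header offsets above advance by n*sizeof(PetscScalar) + sizeof(int). */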
  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          ierr = PetscArraycpy(array,coords,nnodes*cdim);CHKERRQ(ierr);
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
      }
      ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
    } else { /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);CHKERRQ(ierr);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscArraycpy(array,x,nnodes*bs);CHKERRQ(ierr);
        }
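
        /* DMDA vectors store the dof components interlaced, so writing named
           fields as separate scalar arrays requires the strided copy below. */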
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
          }
        } else {
          ierr = PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n  </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (!rank) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRMPI(ierr);

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs     = 0; /* Used for the temporary array size on rank 0 */
  boffset   = 0; /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
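
    /* A rectilinear grid stores only the three 1-D coordinate arrays
       (Xcoord, Ycoord, Zcoord) per piece rather than full 3-D point
       coordinates, so the offsets below advance by xm, ym, and zm entries. */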
\n&quot;,xs,">
    ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);
      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
          if (!fieldname) {
            ierr = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
            fieldname = buf;
          }
          ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
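\n&quot;,xs,">

  /* The scratch buffers must hold either a full field from the largest piece
     (maxnnodes*maxbs entries) or the three concatenated coordinate arrays
     (info.xm+info.ym+info.zm entries), whichever is larger. */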
  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  ierr = PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (!rank) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    { /* Write the coordinates */
      Vec Coords;

      ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
      if (Coords) {
        const PetscScalar *coords;
        ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
        if (!rank) {
          if (r) {
            PetscMPIInt nn;
            ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
            ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
        }
        ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
      } else { /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (!rank) {
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);CHKERRQ(ierr);
      }
    }
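
    /* Field data follows the same protocol as the coordinates: rank 0 either
       copies its own values or receives rank r's buffer (with the length
       validated via MPI_Get_count()) before writing to the appended block. */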
    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (!rank) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscArraycpy(array,x,nnodes*bs);CHKERRQ(ierr);
        }
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
          }
        } else {
          ierr = PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n  </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Arguments:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Notes:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

   If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
   fields are written. Otherwise, a single multi-dof (vector) field is written.
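
   This routine is normally not called directly; a minimal sketch of how it is
   reached (the filename is a placeholder, and X is assumed to have been created
   with DMCreateGlobalVector() on the DMDA) is

.vb
   PetscViewer viewer;
   ierr = PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
   ierr = VecView(X,viewer);CHKERRQ(ierr);            /* queues X for writing */
   ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);  /* all queued fields are written here */
.ve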

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM             dm = (DM)odm;
  PetscBool      isvtk;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
    break;
  case PETSC_VIEWER_VTK_VTR:
    ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(0);
}