#include <petsc/private/dmdaimpl.h>
/*
   Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
   including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named. This is used
   as a proxy for the user's intention to use DMDA fields as distinct
   scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
{
  PetscErrorCode ierr;
  PetscInt       f,bs;

  PetscFunctionBegin;
  *fieldsnamed = PETSC_FALSE;
  ierr = DMDAGetDof(da,&bs);CHKERRQ(ierr);
  for (f=0; f<bs; ++f) {
    const char *fieldname;
    ierr = DMDAGetFieldName(da,f,&fieldname);CHKERRQ(ierr);
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  PetscFunctionReturn(0);
}
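
/* For example (a hypothetical sketch, not code from this file): after
     DMDACreate2d(comm,...,&da);         with dof = 2
     DMDASetFieldName(da,0,"u");
   the helper above returns PETSC_TRUE, so the writers below emit field 0 as
   the scalar array "u" and field 1 as a numbered scalar array, rather than
   one two-component vector array. */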

static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  Vec                      Coords;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
  if (Coords) {
    PetscInt csize;
    ierr = VecGetSize(Coords,&csize);CHKERRQ(ierr);
    if (csize % (mx*my*mz)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    if (cdim < dim || cdim > 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <StructuredGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (rank == 0) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRMPI(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <Points>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"      </Points>\n");CHKERRQ(ierr);

    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);

      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
          if (!fieldname) {
            ierr = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
            fieldname = buf;
          }
          ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </StructuredGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
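
  /* Everything after the "_" marker above is the raw appended-data block.
     Each DataArray payload is preceded by a byte count, which is the
     sizeof(int) added to boffset for every array declared in the header;
     that count is how VTK delimits arrays when encoding="raw". */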

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  ierr = PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
          if (nn != nnodes*cdim) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else {
          ierr = PetscArraycpy(array,coords,nnodes*cdim);CHKERRQ(ierr);
        }
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
      }
      ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
    } else {            /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR);CHKERRQ(ierr);

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscArraycpy(array,x,nnodes*bs);CHKERRQ(ierr);
        }

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
          }
        } else {
          ierr = PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n  </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
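
/* For orientation, the routine above emits a .vts file of roughly this shape
   (a sketch reconstructed from the PetscFPrintf calls, not verbatim output):

     <?xml version="1.0"?>
     <VTKFile type="StructuredGrid" version="0.1" byte_order="...">
       <StructuredGrid WholeExtent="0 mx-1 0 my-1 0 mz-1">
         <Piece Extent="...">                      (one per rank)
           <Points>
             <DataArray ... format="appended" offset="..." />
           </Points>
           <PointData>
             <DataArray ... />                     (one per scalar field, or one vector)
           </PointData>
         </Piece>
       </StructuredGrid>
       <AppendedData encoding="raw">
     _<len><bytes><len><bytes>...
       </AppendedData>
     </VTKFile>
*/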

static PetscErrorCode DMDAVTKWriteAll_VTR(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE                     *fp;
  PetscMPIInt              rank,size,tag;
  DMDALocalInfo            info;
  PetscInt                 dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt                 rloc[6],(*grloc)[6] = NULL;
  PetscScalar              *array,*array2;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  if (PetscDefined(USE_COMPLEX)) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  ierr = DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = PetscFOpen(comm,vtk->filename,"wb",&fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <RectilinearGrid WholeExtent=\"%D %D %D %D %D %D\">\n",0,mx-1,0,my-1,0,mz-1);CHKERRQ(ierr);

  if (rank == 0) {ierr = PetscMalloc1(size*6,&grloc);CHKERRQ(ierr);}
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  ierr = MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm);CHKERRMPI(ierr);

  /* Write XML header */
  maxnnodes = 0;                /* Used for the temporary array size on rank 0 */
  maxbs     = 0;                /* Used for the temporary array size on rank 0 */
  boffset   = 0;                /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    ierr = PetscFPrintf(comm,fp,"    <Piece Extent=\"%D %D %D %D %D %D\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1);CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += xm*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += ym*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%D\" />\n",precision,boffset);CHKERRQ(ierr);
    boffset += zm*sizeof(PetscScalar) + sizeof(int);
    ierr = PetscFPrintf(comm,fp,"      </Coordinates>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"      <PointData Scalars=\"ScalarPointData\">\n");CHKERRQ(ierr);
    for (link=vtk->link; link; link=link->next) {
      Vec        X = (Vec)link->vec;
      PetscInt   bs,f;
      DM         daCurr;
      PetscBool  fieldsnamed;
      const char *vecname = "Unnamed";

      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);
      maxbs = PetscMax(maxbs,bs);
      if (((PetscObject)X)->name || link != vtk->link) {
        ierr = PetscObjectGetName((PetscObject)X,&vecname);CHKERRQ(ierr);
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char       buf[256];
          const char *fieldname;
          ierr = DMDAGetFieldName(daCurr,f,&fieldname);CHKERRQ(ierr);
          if (!fieldname) {
            ierr = PetscSNPrintf(buf,sizeof(buf),"%D",f);CHKERRQ(ierr);
            fieldname = buf;
          }
          ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,fieldname,boffset);CHKERRQ(ierr);
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        ierr = PetscFPrintf(comm,fp,"        <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%D\" format=\"appended\" offset=\"%D\" />\n",precision,vecname,bs,boffset);CHKERRQ(ierr);
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    ierr = PetscFPrintf(comm,fp,"      </PointData>\n");CHKERRQ(ierr);
    ierr = PetscFPrintf(comm,fp,"    </Piece>\n");CHKERRQ(ierr);
  }
  ierr = PetscFPrintf(comm,fp,"  </RectilinearGrid>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"  <AppendedData encoding=\"raw\">\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"_");CHKERRQ(ierr);
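
  /* A rectilinear grid is described by three 1-D coordinate arrays of lengths
     xm, ym and zm (not 3*nnodes as in the .vts writer above), which is why
     each Coordinates DataArray advances boffset by only one local extent. */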

  /* Now write the arrays. */
  tag  = ((PetscObject)viewer)->tag;
  ierr = PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2);CHKERRQ(ierr);
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt   xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs     = grloc[r][0];
      xm     = grloc[r][1];
      ys     = grloc[r][2];
      ym     = grloc[r][3];
      zs     = grloc[r][4];
      zm     = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }
    {                           /* Write the coordinates */
      Vec Coords;

      ierr = DMGetCoordinates(da,&Coords);CHKERRQ(ierr);
      if (Coords) {
        const PetscScalar *coords;
        ierr = VecGetArrayRead(Coords,&coords);CHKERRQ(ierr);
        if (rank == 0) {
          if (r) {
            PetscMPIInt nn;
            ierr = MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
            ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
            if (nn != xm+ym+zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
          } else {
            /* x coordinates */
            for (j=0, k=0, i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[i] = coords[Iloc*dim + 0];
            }
            /* y coordinates */
            for (i=0, k=0, j=0; j<ym; j++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
            }
            /* z coordinates */
            for (i=0, j=0, k=0; k<zm; k++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
            }
          }
        } else if (r == rank) {
          xm = info.xm;
          ym = info.ym;
          zm = info.zm;
          for (j=0, k=0, i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[i] = coords[Iloc*dim + 0];
          }
          for (i=0, k=0, j=0; j<ym; j++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0;
          }
          for (i=0, j=0, k=0; k<zm; k++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0;
          }
          ierr = MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
        }
        ierr = VecRestoreArrayRead(Coords,&coords);CHKERRQ(ierr);
      } else {          /* Fabricate some coordinates using grid index */
        for (i=0; i<xm; i++) {
          array[i] = xs+i;
        }
        for (j=0; j<ym; j++) {
          array[j+xm] = ys+j;
        }
        for (k=0; k<zm; k++) {
          array[k+xm+ym] = zs+k;
        }
      }
      if (rank == 0) {
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR);CHKERRQ(ierr);
        ierr = PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR);CHKERRQ(ierr);
      }
    }
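
    /* Note: the per-axis extraction above samples the x coordinate along the
       j=k=0 line (and similarly for y and z). That is only meaningful for a
       tensor-product coordinate grid, which is what the rectilinear format
       assumes; curvilinear DMDA coordinates cannot be represented in .vtr. */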

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec               X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt          bs,f;
      DM                daCurr;
      PetscBool         fieldsnamed;
      ierr = VecGetDM(X,&daCurr);CHKERRQ(ierr);
      ierr = DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL);CHKERRQ(ierr);

      ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          ierr = MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status);CHKERRMPI(ierr);
          ierr = MPI_Get_count(&status,MPIU_SCALAR,&nn);CHKERRMPI(ierr);
          if (nn != nnodes*bs) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %D",r);
        } else {
          ierr = PetscArraycpy(array,x,nnodes*bs);CHKERRQ(ierr);
        }
        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        ierr = DMDAGetFieldsNamed(daCurr,&fieldsnamed);CHKERRQ(ierr);
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            ierr = PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR);CHKERRQ(ierr);
          }
        } else {
          /* The vector write must be the alternative to the per-field writes
             above, matching the either/or declaration in the XML header;
             writing it unconditionally would emit data the header never
             declared and corrupt the appended-data offsets. */
          ierr = PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR);CHKERRQ(ierr);
        }
      } else if (r == rank) {
        ierr = MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm);CHKERRMPI(ierr);
      }
      ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree2(array,array2);CHKERRQ(ierr);
  ierr = PetscFree(grloc);CHKERRQ(ierr);

  ierr = PetscFPrintf(comm,fp,"\n  </AppendedData>\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm,fp,"</VTKFile>\n");CHKERRQ(ierr);
  ierr = PetscFClose(comm,fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer

   Collective

   Input Parameters:
+  odm - DM specifying the grid layout, passed as a PetscObject
-  viewer - viewer of type VTK

   Level: developer

   Notes:
   This function is a callback used by the VTK viewer to actually write the file.
   The reason for this odd model is that the VTK file format does not provide any way to write one field at a time.
   Instead, metadata for the entire file needs to be available up-front before you can start writing the file.

   If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar
   fields are written. Otherwise, a single multi-dof (vector) field is written.
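
   Example:
   Users never call this directly; a minimal sketch of what triggers it
   (assuming a DMDA-based global vector x; error checking omitted) is
.vb
   PetscViewer viewer;
   PetscViewerVTKOpen(PETSC_COMM_WORLD,"solution.vts",FILE_MODE_WRITE,&viewer);
   VecView(x,viewer);   /* queues x; the file is written when the viewer is destroyed */
   PetscViewerDestroy(&viewer);
.ve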

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer)
{
  DM             dm = (DM)odm;
  PetscBool      isvtk;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk);CHKERRQ(ierr);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name);
  switch (viewer->format) {
  case PETSC_VIEWER_VTK_VTS:
    ierr = DMDAVTKWriteAll_VTS(dm,viewer);CHKERRQ(ierr);
    break;
  case PETSC_VIEWER_VTK_VTR:
    ierr = DMDAVTKWriteAll_VTR(dm,viewer);CHKERRQ(ierr);
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(0);
}