#include <petsc/private/dmdaimpl.h>
/*
 Note that the API for using PETSCVIEWERVTK is totally wrong since its use requires
 including the private vtkvimpl.h file. The code should be refactored.
*/
#include <../src/sys/classes/viewer/impls/vtk/vtkvimpl.h>

/* Helper function which determines if any DMDA fields are named. This is used
 as a proxy for the user's intention to use DMDA fields as distinct
 scalar-valued fields as opposed to a single vector-valued field */
static PetscErrorCode DMDAGetFieldsNamed(DM da,PetscBool *fieldsnamed)
{
  PetscInt f,bs;

  PetscFunctionBegin;
  *fieldsnamed = PETSC_FALSE;
  PetscCall(DMDAGetDof(da,&bs));
  for (f=0; f<bs; ++f) {
    const char * fieldname;
    PetscCall(DMDAGetFieldName(da,f,&fieldname));
    if (fieldname) {
      *fieldsnamed = PETSC_TRUE;
      break;
    }
  }
  PetscFunctionReturn(0);
}

/* Write all vectors queued on the viewer into a single VTK structured-grid (.vts) file.

   Rank 0 performs all file I/O in two passes: first every rank's local extents are
   gathered and the complete XML header is written (each DataArray records its byte
   offset into the raw appended block), then each rank's coordinate and field data is
   received and appended in exactly the order the offsets were computed. */
static PetscErrorCode DMDAVTKWriteAll_VTS(DM da,PetscViewer viewer)
{
  const char *byte_order = PetscBinaryBigEndian() ? "BigEndian" : "LittleEndian";
#if defined(PETSC_USE_REAL_SINGLE)
  const char precision[] = "Float32";
#elif defined(PETSC_USE_REAL_DOUBLE)
  const char precision[] = "Float64";
#else
  const char precision[] = "UnknownPrecision";
#endif
  MPI_Comm comm;
  Vec Coords;
  PetscViewer_VTK *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link;
  FILE *fp;
  PetscMPIInt rank,size,tag;
  DMDALocalInfo info;
  PetscInt dim,mx,my,mz,cdim,bs,boffset,maxnnodes,maxbs,i,j,k,r;
  PetscInt rloc[6],(*grloc)[6] = NULL; /* per-rank (xs,xm,ys,ym,zs,zm), gathered on rank 0 */
  PetscScalar *array,*array2;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)da,&comm));
  PetscCheck(!PetscDefined(USE_COMPLEX),PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported");
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  PetscCall(DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
  PetscCall(DMDAGetLocalInfo(da,&info));
  PetscCall(DMGetCoordinates(da,&Coords));
  if (Coords) {
    PetscInt csize;
    /* Infer the number of coordinate components per node from the coordinate vector size */
    PetscCall(VecGetSize(Coords,&csize));
    PetscCheck(csize % (mx*my*mz) == 0,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
    cdim = csize/(mx*my*mz);
    PetscCheck(cdim >= dim && cdim <= 3,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Coordinate vector size mismatch");
  } else {
    cdim = dim;
  }

  PetscCall(PetscFOpen(comm,vtk->filename,"wb",&fp));
  PetscCall(PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n"));
  PetscCall(PetscFPrintf(comm,fp,"<VTKFile type=\"StructuredGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order));
  PetscCall(PetscFPrintf(comm,fp," <StructuredGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n",0,mx-1,0,my-1,0,mz-1));

  if (rank == 0) PetscCall(PetscMalloc1(size*6,&grloc));
  rloc[0] = info.xs;
  rloc[1] = info.xm;
  rloc[2] = info.ys;
  rloc[3] = info.ym;
  rloc[4] = info.zs;
  rloc[5] = info.zm;
  /* Receive buffer is significant only at the root; grloc is NULL on other ranks */
  PetscCallMPI(MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm));

  /* Write XML header */
  maxnnodes = 0; /* Used for the temporary array size on rank 0 */
  maxbs = 0; /* Used for the temporary array size on rank 0 */
  boffset = 0; /* Offset into binary file */
  for (r=0; r<size; r++) {
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm*ym*zm;
    }
    maxnnodes = PetscMax(maxnnodes,nnodes);
    PetscCall(PetscFPrintf(comm,fp," <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1));
    PetscCall(PetscFPrintf(comm,fp," <Points>\n"));
    PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Position\" NumberOfComponents=\"3\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,boffset));
    /* Each appended array is preceded by a 4-byte (int) length prefix in the raw binary block */
    boffset += 3*nnodes*sizeof(PetscScalar) + sizeof(int);
    PetscCall(PetscFPrintf(comm,fp," </Points>\n"));

    PetscCall(PetscFPrintf(comm,fp," <PointData Scalars=\"ScalarPointData\">\n"));
    for (link=vtk->link; link; link=link->next) {
      Vec X = (Vec)link->vec;
      PetscInt bs,f;
      DM daCurr;
      PetscBool fieldsnamed;
      const char *vecname = "Unnamed";

      PetscCall(VecGetDM(X,&daCurr));
      PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
      maxbs = PetscMax(maxbs,bs);

      /* Only the first queued vector may keep the default "Unnamed"; later vectors use
         their (possibly auto-generated) object name to keep DataArray names distinct */
      if (((PetscObject)X)->name || link != vtk->link) {
        PetscCall(PetscObjectGetName((PetscObject)X,&vecname));
      }

      /* If any fields are named, add scalar fields. Otherwise, add a vector field */
      PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed));
      if (fieldsnamed) {
        for (f=0; f<bs; f++) {
          char buf[256];
          const char *fieldname;
          PetscCall(DMDAGetFieldName(daCurr,f,&fieldname));
          if (!fieldname) {
            /* Unnamed components of a partially-named vector fall back to their index */
            PetscCall(PetscSNPrintf(buf,sizeof(buf),"%" PetscInt_FMT,f));
            fieldname = buf;
          }
          PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,fieldname,boffset));
          boffset += nnodes*sizeof(PetscScalar) + sizeof(int);
        }
      } else {
        PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,bs,boffset));
        boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int);
      }
    }
    PetscCall(PetscFPrintf(comm,fp," </PointData>\n"));
    PetscCall(PetscFPrintf(comm,fp," </Piece>\n"));
  }
  PetscCall(PetscFPrintf(comm,fp," </StructuredGrid>\n"));
  PetscCall(PetscFPrintf(comm,fp," <AppendedData encoding=\"raw\">\n"));
  PetscCall(PetscFPrintf(comm,fp,"_"));

  /* Now write the arrays. The write order below must exactly match the offsets computed above. */
  tag = ((PetscObject)viewer)->tag;
  /* array receives remote data; array2 holds the transposed/padded copy actually written */
  PetscCall(PetscMalloc2(maxnnodes*PetscMax(3,maxbs),&array,maxnnodes*PetscMax(3,maxbs),&array2));
  for (r=0; r<size; r++) {
    MPI_Status status;
    PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0;
    if (rank == 0) {
      xs = grloc[r][0];
      xm = grloc[r][1];
      ys = grloc[r][2];
      ym = grloc[r][3];
      zs = grloc[r][4];
      zm = grloc[r][5];
      nnodes = xm*ym*zm;
    } else if (r == rank) {
      nnodes = info.xm*info.ym*info.zm;
    }

    /* Write the coordinates */
    if (Coords) {
      const PetscScalar *coords;
      PetscCall(VecGetArrayRead(Coords,&coords));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array,nnodes*cdim,MPIU_SCALAR,r,tag,comm,&status));
          PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn));
          PetscCheck(nn == nnodes*cdim,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch");
        } else PetscCall(PetscArraycpy(array,coords,nnodes*cdim));
        /* Transpose coordinates to VTK (C-style) ordering */
        for (k=0; k<zm; k++) {
          for (j=0; j<ym; j++) {
            for (i=0; i<xm; i++) {
              PetscInt Iloc = i+xm*(j+ym*k);
              /* VTK Points are always 3-D: pad missing components with zero */
              array2[Iloc*3+0] = array[Iloc*cdim + 0];
              array2[Iloc*3+1] = cdim > 1 ? array[Iloc*cdim + 1] : 0.0;
              array2[Iloc*3+2] = cdim > 2 ? array[Iloc*cdim + 2] : 0.0;
            }
          }
        }
      } else if (r == rank) {
        PetscCallMPI(MPI_Send((void*)coords,nnodes*cdim,MPIU_SCALAR,0,tag,comm));
      }
      PetscCall(VecRestoreArrayRead(Coords,&coords));
    } else { /* Fabricate some coordinates using grid index */
      for (k=0; k<zm; k++) {
        for (j=0; j<ym; j++) {
          for (i=0; i<xm; i++) {
            PetscInt Iloc = i+xm*(j+ym*k);
            array2[Iloc*3+0] = xs+i;
            array2[Iloc*3+1] = ys+j;
            array2[Iloc*3+2] = zs+k;
          }
        }
      }
    }
    PetscCall(PetscViewerVTKFWrite(viewer,fp,array2,nnodes*3,MPIU_SCALAR));

    /* Write each of the objects queued up for this file */
    for (link=vtk->link; link; link=link->next) {
      Vec X = (Vec)link->vec;
      const PetscScalar *x;
      PetscInt bs,f;
      DM daCurr;
      PetscBool fieldsnamed;
      PetscCall(VecGetDM(X,&daCurr));
      PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL, NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL));
      PetscCall(VecGetArrayRead(X,&x));
      if (rank == 0) {
        if (r) {
          PetscMPIInt nn;
          PetscCallMPI(MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status));
          PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn));
          PetscCheck(nn == nnodes*bs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %" PetscInt_FMT,r);
        } else PetscCall(PetscArraycpy(array,x,nnodes*bs));

        /* If any fields are named, add scalar fields. Otherwise, add a vector field */
        PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed));
        if (fieldsnamed) {
          for (f=0; f<bs; f++) {
            /* Extract and transpose the f'th field */
            for (k=0; k<zm; k++) {
              for (j=0; j<ym; j++) {
                for (i=0; i<xm; i++) {
                  PetscInt Iloc = i+xm*(j+ym*k);
                  array2[Iloc] = array[Iloc*bs + f];
                }
              }
            }
            PetscCall(PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR));
          }
        } else PetscCall(PetscViewerVTKFWrite(viewer,fp,array,bs*nnodes,MPIU_SCALAR));
      } else if (r == rank) PetscCallMPI(MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm));
      PetscCall(VecRestoreArrayRead(X,&x));
    }
  }
  PetscCall(PetscFree2(array,array2));
  PetscCall(PetscFree(grloc));

  PetscCall(PetscFPrintf(comm,fp,"\n </AppendedData>\n"));
  PetscCall(PetscFPrintf(comm,fp,"</VTKFile>\n"));
  PetscCall(PetscFClose(comm,fp));
  PetscFunctionReturn(0);
}
"BigEndian" : "LittleEndian"; 254 #if defined(PETSC_USE_REAL_SINGLE) 255 const char precision[] = "Float32"; 256 #elif defined(PETSC_USE_REAL_DOUBLE) 257 const char precision[] = "Float64"; 258 #else 259 const char precision[] = "UnknownPrecision"; 260 #endif 261 MPI_Comm comm; 262 PetscViewer_VTK *vtk = (PetscViewer_VTK*)viewer->data; 263 PetscViewerVTKObjectLink link; 264 FILE *fp; 265 PetscMPIInt rank,size,tag; 266 DMDALocalInfo info; 267 PetscInt dim,mx,my,mz,boffset,maxnnodes,maxbs,i,j,k,r; 268 PetscInt rloc[6],(*grloc)[6] = NULL; 269 PetscScalar *array,*array2; 270 271 PetscFunctionBegin; 272 PetscCall(PetscObjectGetComm((PetscObject)da,&comm)); 273 PetscCheck(!PetscDefined(USE_COMPLEX),PETSC_COMM_SELF,PETSC_ERR_SUP,"Complex values not supported"); 274 PetscCallMPI(MPI_Comm_size(comm,&size)); 275 PetscCallMPI(MPI_Comm_rank(comm,&rank)); 276 PetscCall(DMDAGetInfo(da,&dim,&mx,&my,&mz,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL)); 277 PetscCall(DMDAGetLocalInfo(da,&info)); 278 PetscCall(PetscFOpen(comm,vtk->filename,"wb",&fp)); 279 PetscCall(PetscFPrintf(comm,fp,"<?xml version=\"1.0\"?>\n")); 280 PetscCall(PetscFPrintf(comm,fp,"<VTKFile type=\"RectilinearGrid\" version=\"0.1\" byte_order=\"%s\">\n",byte_order)); 281 PetscCall(PetscFPrintf(comm,fp," <RectilinearGrid WholeExtent=\"%d %" PetscInt_FMT " %d %" PetscInt_FMT " %d %" PetscInt_FMT "\">\n",0,mx-1,0,my-1,0,mz-1)); 282 283 if (rank == 0) PetscCall(PetscMalloc1(size*6,&grloc)); 284 rloc[0] = info.xs; 285 rloc[1] = info.xm; 286 rloc[2] = info.ys; 287 rloc[3] = info.ym; 288 rloc[4] = info.zs; 289 rloc[5] = info.zm; 290 PetscCallMPI(MPI_Gather(rloc,6,MPIU_INT,&grloc[0][0],6,MPIU_INT,0,comm)); 291 292 /* Write XML header */ 293 maxnnodes = 0; /* Used for the temporary array size on rank 0 */ 294 maxbs = 0; /* Used for the temporary array size on rank 0 */ 295 boffset = 0; /* Offset into binary file */ 296 for (r=0; r<size; r++) { 297 PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0; 298 if (rank == 0) { 
299 xs = grloc[r][0]; 300 xm = grloc[r][1]; 301 ys = grloc[r][2]; 302 ym = grloc[r][3]; 303 zs = grloc[r][4]; 304 zm = grloc[r][5]; 305 nnodes = xm*ym*zm; 306 } 307 maxnnodes = PetscMax(maxnnodes,nnodes); 308 PetscCall(PetscFPrintf(comm,fp," <Piece Extent=\"%" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT "\">\n",xs,xs+xm-1,ys,ys+ym-1,zs,zs+zm-1)); 309 PetscCall(PetscFPrintf(comm,fp," <Coordinates>\n")); 310 PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Xcoord\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,boffset)); 311 boffset += xm*sizeof(PetscScalar) + sizeof(int); 312 PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Ycoord\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,boffset)); 313 boffset += ym*sizeof(PetscScalar) + sizeof(int); 314 PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"Zcoord\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,boffset)); 315 boffset += zm*sizeof(PetscScalar) + sizeof(int); 316 PetscCall(PetscFPrintf(comm,fp," </Coordinates>\n")); 317 PetscCall(PetscFPrintf(comm,fp," <PointData Scalars=\"ScalarPointData\">\n")); 318 for (link=vtk->link; link; link=link->next) { 319 Vec X = (Vec)link->vec; 320 PetscInt bs,f; 321 DM daCurr; 322 PetscBool fieldsnamed; 323 const char *vecname = "Unnamed"; 324 325 PetscCall(VecGetDM(X,&daCurr)); 326 PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL)); 327 maxbs = PetscMax(maxbs,bs); 328 if (((PetscObject)X)->name || link != vtk->link) { 329 PetscCall(PetscObjectGetName((PetscObject)X,&vecname)); 330 } 331 332 /* If any fields are named, add scalar fields. 
Otherwise, add a vector field */ 333 PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed)); 334 if (fieldsnamed) { 335 for (f=0; f<bs; f++) { 336 char buf[256]; 337 const char *fieldname; 338 PetscCall(DMDAGetFieldName(daCurr,f,&fieldname)); 339 if (!fieldname) { 340 PetscCall(PetscSNPrintf(buf,sizeof(buf),"%" PetscInt_FMT,f)); 341 fieldname = buf; 342 } 343 PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s.%s\" NumberOfComponents=\"1\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,fieldname,boffset)); 344 boffset += nnodes*sizeof(PetscScalar) + sizeof(int); 345 } 346 } else { 347 PetscCall(PetscFPrintf(comm,fp," <DataArray type=\"%s\" Name=\"%s\" NumberOfComponents=\"%" PetscInt_FMT "\" format=\"appended\" offset=\"%" PetscInt_FMT "\" />\n",precision,vecname,bs,boffset)); 348 boffset += bs*nnodes*sizeof(PetscScalar) + sizeof(int); 349 } 350 } 351 PetscCall(PetscFPrintf(comm,fp," </PointData>\n")); 352 PetscCall(PetscFPrintf(comm,fp," </Piece>\n")); 353 } 354 PetscCall(PetscFPrintf(comm,fp," </RectilinearGrid>\n")); 355 PetscCall(PetscFPrintf(comm,fp," <AppendedData encoding=\"raw\">\n")); 356 PetscCall(PetscFPrintf(comm,fp,"_")); 357 358 /* Now write the arrays. 
*/ 359 tag = ((PetscObject)viewer)->tag; 360 PetscCall(PetscMalloc2(PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array,PetscMax(maxnnodes*maxbs,info.xm+info.ym+info.zm),&array2)); 361 for (r=0; r<size; r++) { 362 MPI_Status status; 363 PetscInt xs=-1,xm=-1,ys=-1,ym=-1,zs=-1,zm=-1,nnodes = 0; 364 if (rank == 0) { 365 xs = grloc[r][0]; 366 xm = grloc[r][1]; 367 ys = grloc[r][2]; 368 ym = grloc[r][3]; 369 zs = grloc[r][4]; 370 zm = grloc[r][5]; 371 nnodes = xm*ym*zm; 372 } else if (r == rank) { 373 nnodes = info.xm*info.ym*info.zm; 374 } 375 { /* Write the coordinates */ 376 Vec Coords; 377 378 PetscCall(DMGetCoordinates(da,&Coords)); 379 if (Coords) { 380 const PetscScalar *coords; 381 PetscCall(VecGetArrayRead(Coords,&coords)); 382 if (rank == 0) { 383 if (r) { 384 PetscMPIInt nn; 385 PetscCallMPI(MPI_Recv(array,xm+ym+zm,MPIU_SCALAR,r,tag,comm,&status)); 386 PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn)); 387 PetscCheck(nn == xm+ym+zm,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch"); 388 } else { 389 /* x coordinates */ 390 for (j=0, k=0, i=0; i<xm; i++) { 391 PetscInt Iloc = i+xm*(j+ym*k); 392 array[i] = coords[Iloc*dim + 0]; 393 } 394 /* y coordinates */ 395 for (i=0, k=0, j=0; j<ym; j++) { 396 PetscInt Iloc = i+xm*(j+ym*k); 397 array[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0; 398 } 399 /* z coordinates */ 400 for (i=0, j=0, k=0; k<zm; k++) { 401 PetscInt Iloc = i+xm*(j+ym*k); 402 array[k+xm+ym] = dim > 2 ? coords[Iloc*dim + 2] : 0; 403 } 404 } 405 } else if (r == rank) { 406 xm = info.xm; 407 ym = info.ym; 408 zm = info.zm; 409 for (j=0, k=0, i=0; i<xm; i++) { 410 PetscInt Iloc = i+xm*(j+ym*k); 411 array2[i] = coords[Iloc*dim + 0]; 412 } 413 for (i=0, k=0, j=0; j<ym; j++) { 414 PetscInt Iloc = i+xm*(j+ym*k); 415 array2[j+xm] = dim > 1 ? coords[Iloc*dim + 1] : 0; 416 } 417 for (i=0, j=0, k=0; k<zm; k++) { 418 PetscInt Iloc = i+xm*(j+ym*k); 419 array2[k+xm+ym] = dim > 2 ? 
coords[Iloc*dim + 2] : 0; 420 } 421 PetscCallMPI(MPI_Send((void*)array2,xm+ym+zm,MPIU_SCALAR,0,tag,comm)); 422 } 423 PetscCall(VecRestoreArrayRead(Coords,&coords)); 424 } else { /* Fabricate some coordinates using grid index */ 425 for (i=0; i<xm; i++) { 426 array[i] = xs+i; 427 } 428 for (j=0; j<ym; j++) { 429 array[j+xm] = ys+j; 430 } 431 for (k=0; k<zm; k++) { 432 array[k+xm+ym] = zs+k; 433 } 434 } 435 if (rank == 0) { 436 PetscCall(PetscViewerVTKFWrite(viewer,fp,&(array[0]),xm,MPIU_SCALAR)); 437 PetscCall(PetscViewerVTKFWrite(viewer,fp,&(array[xm]),ym,MPIU_SCALAR)); 438 PetscCall(PetscViewerVTKFWrite(viewer,fp,&(array[xm+ym]),zm,MPIU_SCALAR)); 439 } 440 } 441 442 /* Write each of the objects queued up for this file */ 443 for (link=vtk->link; link; link=link->next) { 444 Vec X = (Vec)link->vec; 445 const PetscScalar *x; 446 PetscInt bs,f; 447 DM daCurr; 448 PetscBool fieldsnamed; 449 PetscCall(VecGetDM(X,&daCurr)); 450 PetscCall(DMDAGetInfo(daCurr,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&bs,NULL,NULL,NULL,NULL,NULL)); 451 452 PetscCall(VecGetArrayRead(X,&x)); 453 if (rank == 0) { 454 if (r) { 455 PetscMPIInt nn; 456 PetscCallMPI(MPI_Recv(array,nnodes*bs,MPIU_SCALAR,r,tag,comm,&status)); 457 PetscCallMPI(MPI_Get_count(&status,MPIU_SCALAR,&nn)); 458 PetscCheck(nn == nnodes*bs,PETSC_COMM_SELF,PETSC_ERR_PLIB,"Array size mismatch receiving from rank %" PetscInt_FMT,r); 459 } else PetscCall(PetscArraycpy(array,x,nnodes*bs)); 460 /* If any fields are named, add scalar fields. 
Otherwise, add a vector field */ 461 PetscCall(DMDAGetFieldsNamed(daCurr,&fieldsnamed)); 462 if (fieldsnamed) { 463 for (f=0; f<bs; f++) { 464 /* Extract and transpose the f'th field */ 465 for (k=0; k<zm; k++) { 466 for (j=0; j<ym; j++) { 467 for (i=0; i<xm; i++) { 468 PetscInt Iloc = i+xm*(j+ym*k); 469 array2[Iloc] = array[Iloc*bs + f]; 470 } 471 } 472 } 473 PetscCall(PetscViewerVTKFWrite(viewer,fp,array2,nnodes,MPIU_SCALAR)); 474 } 475 } 476 PetscCall(PetscViewerVTKFWrite(viewer,fp,array,nnodes*bs,MPIU_SCALAR)); 477 478 } else if (r == rank) { 479 PetscCallMPI(MPI_Send((void*)x,nnodes*bs,MPIU_SCALAR,0,tag,comm)); 480 } 481 PetscCall(VecRestoreArrayRead(X,&x)); 482 } 483 } 484 PetscCall(PetscFree2(array,array2)); 485 PetscCall(PetscFree(grloc)); 486 487 PetscCall(PetscFPrintf(comm,fp,"\n </AppendedData>\n")); 488 PetscCall(PetscFPrintf(comm,fp,"</VTKFile>\n")); 489 PetscCall(PetscFClose(comm,fp)); 490 PetscFunctionReturn(0); 491 } 492 493 /*@C 494 DMDAVTKWriteAll - Write a file containing all the fields that have been provided to the viewer 495 496 Collective 497 498 Input Parameters: 499 + odm - DM specifying the grid layout, passed as a PetscObject 500 - viewer - viewer of type VTK 501 502 Level: developer 503 504 Notes: 505 This function is a callback used by the VTK viewer to actually write the file. 506 The reason for this odd model is that the VTK file format does not provide any way to write one field at a time. 507 Instead, metadata for the entire file needs to be available up-front before you can start writing the file. 508 509 If any fields have been named (see e.g. DMDASetFieldName()), then individual scalar 510 fields are written. Otherwise, a single multi-dof (vector) field is written. 
511 512 .seealso: `PETSCVIEWERVTK` 513 @*/ 514 PetscErrorCode DMDAVTKWriteAll(PetscObject odm,PetscViewer viewer) 515 { 516 DM dm = (DM)odm; 517 PetscBool isvtk; 518 519 PetscFunctionBegin; 520 PetscValidHeaderSpecific(dm,DM_CLASSID,1); 521 PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2); 522 PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERVTK,&isvtk)); 523 PetscCheck(isvtk,PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot use viewer type %s",((PetscObject)viewer)->type_name); 524 switch (viewer->format) { 525 case PETSC_VIEWER_VTK_VTS: 526 PetscCall(DMDAVTKWriteAll_VTS(dm,viewer)); 527 break; 528 case PETSC_VIEWER_VTK_VTR: 529 PetscCall(DMDAVTKWriteAll_VTR(dm,viewer)); 530 break; 531 default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for format '%s'",PetscViewerFormats[viewer->format]); 532 } 533 PetscFunctionReturn(0); 534 } 535