program ex62f90
#include "petsc/finclude/petsc.h"
  use petsc
  implicit none
#include "exodusII.inc"

  ! Get the fortran kind associated with PetscInt and PetscReal so that we can use literal constants.
  PetscInt :: dummyPetscInt
  PetscReal :: dummyPetscReal
  integer,parameter :: kPI = kind(dummyPetscInt)
  integer,parameter :: kPR = kind(dummyPetscReal)

  type(tDM) :: dm,dmU,dmA,dmS,dmUA,dmUA2,pDM
  type(tDM),dimension(:),pointer :: dmList
  type(tVec) :: X,U,A,S,UA,UA2
  type(tIS) :: isU,isA,isS,isUA
  type(tPetscSection) :: section
  PetscInt,dimension(1) :: fieldU = [0]
  PetscInt,dimension(1) :: fieldA = [2]
  PetscInt,dimension(1) :: fieldS = [1]
  PetscInt,dimension(2) :: fieldUA = [0,2]
  character(len=PETSC_MAX_PATH_LEN) :: ifilename,ofilename,IOBuffer
  integer :: exoid = -1
  type(tIS) :: csIS
  PetscInt,dimension(:),pointer :: csID
  PetscInt,dimension(:),pointer :: pStartDepth,pEndDepth
  PetscInt :: order = 1
  PetscInt :: sdim,d,pStart,pEnd,p,numCS,set,i,j
  PetscMPIInt :: rank,numProc
  PetscBool :: flg
  PetscErrorCode :: ierr
  MPI_Comm :: comm
  type(tPetscViewer) :: viewer

  character(len=MXSTLN) :: sJunk
  PetscInt :: numstep = 3, step
  PetscInt :: numNodalVar,numZonalVar
  character(len=MXSTLN) :: nodalVarName(4)
  character(len=MXSTLN) :: zonalVarName(6)
  logical,dimension(:,:),pointer :: truthtable

  type(tIS) :: cellIS
  PetscInt,dimension(:),pointer :: cellID
  PetscInt :: numCells, cell, closureSize
  PetscInt,dimension(:),pointer :: closureA,closure

  type(tPetscSection) :: sectionUA,coordSection
  type(tVec) :: UALoc,coord
  PetscScalar,dimension(:),pointer :: cval,xyz
  PetscInt :: dofUA,offUA,c

  ! dof layout ordered by increasing depth in the DAG: vertex, edge, face, cell
  PetscInt,dimension(3),target :: dofS2D     = [0, 0, 3]
  PetscInt,dimension(3),target :: dofUP1Tri  = [2, 0, 0]
  PetscInt,dimension(3),target :: dofAP1Tri  = [1, 0, 0]
  PetscInt,dimension(3),target :: dofUP2Tri  = [2, 2, 0]
  PetscInt,dimension(3),target :: dofAP2Tri  = [1, 1, 0]
  PetscInt,dimension(3),target :: dofUP1Quad = [2, 0, 0]
  PetscInt,dimension(3),target :: dofAP1Quad = [1, 0, 0]
  PetscInt,dimension(3),target :: dofUP2Quad = [2, 2, 2]
  PetscInt,dimension(3),target :: dofAP2Quad = [1, 1, 1]
  PetscInt,dimension(4),target :: dofS3D     = [0, 0, 0, 6]
  PetscInt,dimension(4),target :: dofUP1Tet  = [3, 0, 0, 0]
  PetscInt,dimension(4),target :: dofAP1Tet  = [1, 0, 0, 0]
  PetscInt,dimension(4),target :: dofUP2Tet  = [3, 3, 0, 0]
  PetscInt,dimension(4),target :: dofAP2Tet  = [1, 1, 0, 0]
  PetscInt,dimension(4),target :: dofUP1Hex  = [3, 0, 0, 0]
  PetscInt,dimension(4),target :: dofAP1Hex  = [1, 0, 0, 0]
  PetscInt,dimension(4),target :: dofUP2Hex  = [3, 3, 3, 3]
  PetscInt,dimension(4),target :: dofAP2Hex  = [1, 1, 1, 1]
  PetscInt,dimension(:),pointer :: dofU,dofA,dofS

  type(tPetscSF) :: migrationSF
  PetscPartitioner :: part

  type(tVec) :: tmpVec
  PetscReal :: norm
  PetscReal :: time = 1.234_kPR

  PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
  if (ierr /= 0) then
    print*,'Unable to initialize PETSc'
    stop
  endif

  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,numProc,ierr))
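
  ! A typical invocation, for illustration only (the mesh and result file names are
  ! placeholders; -i, -o and -order are the options parsed just below):
  !   mpiexec -n 2 ./ex62f90 -i mesh.exo -o mesh_out.exo -order 2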
  PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,"-i",ifilename,flg,ierr))
  if (.not. flg) then
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"missing input file name -i <input file name>")
  end if
  PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,"-o",ofilename,flg,ierr))
  if (.not. flg) then
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"missing output file name -o <output file name>")
  end if
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,"-order",order,flg,ierr))
  if ((order > 2) .or. (order < 1)) then
    write(IOBuffer,'("Unsupported polynomial order ", I2, " not in [1,2]")') order
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
  end if

  ! Read the mesh in any supported format
  PetscCallA(DMPlexCreateFromFile(PETSC_COMM_WORLD,ifilename,PETSC_NULL_CHARACTER,PETSC_TRUE,dm,ierr))
  PetscCallA(DMPlexDistributeSetDefault(dm,PETSC_FALSE,ierr))
  PetscCallA(DMSetFromOptions(dm,ierr))
  PetscCallA(DMGetDimension(dm,sdim,ierr))
  PetscCallA(DMViewFromOptions(dm,PETSC_NULL_OPTIONS,"-dm_view",ierr))

  ! Create the exodus result file

  ! enable exodus debugging information
  PetscCallA(exopts(EXVRBS+EXDEBG,ierr))
  ! Create the exodus file
  PetscCallA(PetscViewerExodusIIOpen(PETSC_COMM_WORLD,ofilename,FILE_MODE_WRITE,viewer,ierr))
  ! The long way would be
  !
  ! PetscCallA(PetscViewerCreate(PETSC_COMM_WORLD,viewer,ierr))
  ! PetscCallA(PetscViewerSetType(viewer,PETSCVIEWEREXODUSII,ierr))
  ! PetscCallA(PetscViewerFileSetMode(viewer,FILE_MODE_WRITE,ierr))
  ! PetscCallA(PetscViewerFileSetName(viewer,ofilename,ierr))

  ! set the mesh order
  PetscCallA(PetscViewerExodusIISetOrder(viewer,order,ierr))
  PetscCallA(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD,ierr))
  !
  ! Notice how the exodus file is actually NOT open at this point (exoid is -1).
  ! Since we are overwriting the file (mode is FILE_MODE_WRITE), we will have to
  ! write the geometry (the DM), which can only be done on a brand new file.
  !

  ! Save the geometry to the file, erasing all previous content
  PetscCallA(DMView(dm,viewer,ierr))
  PetscCallA(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD,ierr))
  !
  ! Note how the exodus file is now open
  !
  ! "Format" the exodus result file, i.e. allocate space for nodal and zonal variables
  select case(sdim)
  case(2)
    numNodalVar = 3
    nodalVarName(1:numNodalVar) = ["U_x  ","U_y  ","Alpha"]
    numZonalVar = 3
    zonalVarName(1:numZonalVar) = ["Sigma_11","Sigma_22","Sigma_12"]
  case(3)
    numNodalVar = 4
    nodalVarName(1:numNodalVar) = ["U_x  ","U_y  ","U_z  ","Alpha"]
    numZonalVar = 6
    zonalVarName(1:numZonalVar) = ["Sigma_11","Sigma_22","Sigma_33","Sigma_23","Sigma_13","Sigma_12"]
  case default
    write(IOBuffer,'("No layout for dimension ",I2)') sdim
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
  end select
  PetscCallA(PetscViewerExodusIIGetId(viewer,exoid,ierr))
  PetscCallA(expvp(exoid, "E", numZonalVar,ierr))
  PetscCallA(expvan(exoid, "E", numZonalVar, zonalVarName,ierr))
  PetscCallA(expvp(exoid, "N", numNodalVar,ierr))
  PetscCallA(expvan(exoid, "N", numNodalVar, nodalVarName,ierr))
  PetscCallA(exinq(exoid, EX_INQ_ELEM_BLK, numCS, PETSC_NULL_REAL, sJunk,ierr))

  ! An exodusII truth table specifies which fields are saved at which time step.
  ! It speeds up I/O by reserving space for fields in the file ahead of time.
  allocate(truthtable(numCS,numZonalVar))
  truthtable = .true.
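  ! Here every zonal variable is declared on every cell set, so the truth table is all
  ! .true.; a more selective layout would flag only the (block, variable) pairs that are
  ! actually written.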
  PetscCallA(expvtt(exoid, numCS, numZonalVar, truthtable, ierr))
  deallocate(truthtable)

  ! Write time step information in the file. Note that this is currently broken in the exodus library for netcdf4 (HDF5-based) files
  do step = 1,numstep
    PetscCallA(exptim(exoid,step,real(step,kind=kPR),ierr))
  end do

  PetscCallA(PetscObjectGetComm(dm,comm,ierr))
  PetscCallA(PetscSectionCreate(comm, section,ierr))
  PetscCallA(PetscSectionSetNumFields(section, 3_kPI,ierr))
  PetscCallA(PetscSectionSetFieldName(section, fieldU, "U",ierr))
  PetscCallA(PetscSectionSetFieldName(section, fieldA, "Alpha",ierr))
  PetscCallA(PetscSectionSetFieldName(section, fieldS, "Sigma",ierr))
  PetscCallA(DMPlexGetChart(dm, pStart, pEnd,ierr))
  PetscCallA(PetscSectionSetChart(section, pStart, pEnd,ierr))

  allocate(pStartDepth(sdim+1))
  allocate(pEndDepth(sdim+1))
  do d = 1, sdim+1
    PetscCallA(DMPlexGetDepthStratum(dm, d-1, pStartDepth(d), pEndDepth(d),ierr))
  end do

  ! Vector field U, Scalar field Alpha, Tensor field Sigma
  PetscCallA(PetscSectionSetFieldComponents(section, fieldU, sdim,ierr))
  PetscCallA(PetscSectionSetFieldComponents(section, fieldA, 1_kPI,ierr))
  PetscCallA(PetscSectionSetFieldComponents(section, fieldS, sdim*(sdim+1)/2,ierr))

  ! Go through cell sets, then cells, and set up storage in the section
  PetscCallA(DMGetLabelSize(dm, "Cell Sets", numCS, ierr))
  PetscCallA(DMGetLabelIdIS(dm, "Cell Sets", csIS, ierr))
  PetscCallA(ISGetIndicesF90(csIS, csID, ierr))
  do set = 1,numCS
    !!! This is pointless but will avoid a warning with gfortran and -Werror=maybe-uninitialized
    nullify(dofA)
    nullify(dofU)
    nullify(dofS)
    PetscCallA(DMGetStratumSize(dm, "Cell Sets", csID(set), numCells,ierr))
    PetscCallA(DMGetStratumIS(dm, "Cell Sets", csID(set), cellIS,ierr))
    if (numCells > 0) then
      select case(sdim)
      case(2)
        dofS => dofS2D
      case(3)
        dofS => dofS3D
      case default
        write(IOBuffer,'("No layout for dimension ",I2)') sdim
        SETERRA(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
      end select ! sdim

      ! Identify the cell type based on closure size only. This works for Tri/Tet/Quad/Hex meshes
      ! but is not enough to identify more exotic elements such as pyramids or prisms.
      PetscCallA(ISGetIndicesF90(cellIS, cellID,ierr))
      nullify(closureA)
      PetscCallA(DMPlexGetTransitiveClosure(dm, cellID(1), PETSC_TRUE, closureA,ierr))
      select case(size(closureA)/2)
      case(7) ! Tri
        if (order == 1) then
          dofU => dofUP1Tri
          dofA => dofAP1Tri
        else
          dofU => dofUP2Tri
          dofA => dofAP2Tri
        end if
      case(9) ! Quad
        if (order == 1) then
          dofU => dofUP1Quad
          dofA => dofAP1Quad
        else
          dofU => dofUP2Quad
          dofA => dofAP2Quad
        end if
      case(15) ! Tet
        if (order == 1) then
          dofU => dofUP1Tet
          dofA => dofAP1Tet
        else
          dofU => dofUP2Tet
          dofA => dofAP2Tet
        end if
      case(27) ! Hex
        if (order == 1) then
          dofU => dofUP1Hex
          dofA => dofAP1Hex
        else
          dofU => dofUP2Hex
          dofA => dofAP2Hex
        end if
      case default
        write(IOBuffer,'("Unknown element with closure size ",I2)') size(closureA)/2
        SETERRA(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,IOBuffer)
      end select
      PetscCallA(DMPlexRestoreTransitiveClosure(dm, cellID(1), PETSC_TRUE, closureA,ierr))
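      ! For each cell in this set, walk its transitive closure and give every point
      ! (vertex, edge, face, cell) the number of dofs dictated by its depth and by the
      ! dof layout arrays selected above.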
      do cell = 1,numCells
        nullify(closure)
        PetscCallA(DMPlexGetTransitiveClosure(dm, cellID(cell), PETSC_TRUE, closure,ierr))
        do p = 1,size(closure),2
          ! find the depth of point p
          do d = 1,sdim+1
            if ((closure(p) >= pStartDepth(d)) .and. (closure(p) < pEndDepth(d))) then
              PetscCallA(PetscSectionSetDof(section, closure(p), dofU(d)+dofA(d)+dofS(d),ierr))
              PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldU, dofU(d),ierr))
              PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldA, dofA(d),ierr))
              PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldS, dofS(d),ierr))
            end if ! closure(p)
          end do ! d
        end do ! p
        PetscCallA(DMPlexRestoreTransitiveClosure(dm, cellID(cell), PETSC_TRUE, closure,ierr))
      end do ! cell
      PetscCallA(ISRestoreIndicesF90(cellIS, cellID,ierr))
      PetscCallA(ISDestroy(cellIS,ierr))
    end if ! numCells
  end do ! set
  PetscCallA(ISRestoreIndicesF90(csIS, csID,ierr))
  PetscCallA(ISDestroy(csIS,ierr))
  PetscCallA(PetscSectionSetUp(section,ierr))
  PetscCallA(DMSetLocalSection(dm, section,ierr))
  PetscCallA(PetscObjectViewFromOptions(section, PETSC_NULL_SECTION, "-dm_section_view",ierr))
  PetscCallA(PetscSectionDestroy(section,ierr))

  PetscCallA(DMSetUseNatural(dm,PETSC_TRUE,ierr))
  PetscCallA(DMPlexGetPartitioner(dm,part,ierr))
  PetscCallA(PetscPartitionerSetFromOptions(part,ierr))
  PetscCallA(DMPlexDistribute(dm,0_kPI,migrationSF,pdm,ierr))

  if (numProc > 1) then
    PetscCallA(DMPlexSetMigrationSF(pdm,migrationSF,ierr))
    PetscCallA(PetscSFDestroy(migrationSF,ierr))
  else
    pdm = dm
  end if
  PetscCallA(DMViewFromOptions(pdm,PETSC_NULL_OPTIONS,"-dm_view",ierr))

  ! Get a DM and IS for each field of dm
  PetscCallA(DMCreateSubDM(pdm, 1_kPI, fieldU, isU, dmU,ierr))
  PetscCallA(DMCreateSubDM(pdm, 1_kPI, fieldA, isA, dmA,ierr))
  PetscCallA(DMCreateSubDM(pdm, 1_kPI, fieldS, isS, dmS,ierr))
  PetscCallA(DMCreateSubDM(pdm, 2_kPI, fieldUA, isUA, dmUA,ierr))

  ! Create a DM for the combined (U,Alpha) field from the two single-field DMs
  allocate(dmList(2))
  dmList(1) = dmU
  dmList(2) = dmA
  PetscCallA(DMCreateSuperDM(dmList,2_kPI,PETSC_NULL_IS,dmUA2,ierr))
  deallocate(dmList)

  PetscCallA(DMGetGlobalVector(pdm, X,ierr))
  PetscCallA(DMGetGlobalVector(dmU, U,ierr))
  PetscCallA(DMGetGlobalVector(dmA, A,ierr))
  PetscCallA(DMGetGlobalVector(dmS, S,ierr))
  PetscCallA(DMGetGlobalVector(dmUA, UA,ierr))
  PetscCallA(DMGetGlobalVector(dmUA2, UA2,ierr))

  PetscCallA(PetscObjectSetName(U, "U",ierr))
  PetscCallA(PetscObjectSetName(A, "Alpha",ierr))
  PetscCallA(PetscObjectSetName(S, "Sigma",ierr))
  PetscCallA(PetscObjectSetName(UA, "UAlpha",ierr))
  PetscCallA(PetscObjectSetName(UA2, "UAlpha2",ierr))
  PetscCallA(VecSet(X, -111.0_kPR,ierr))
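
  ! X is filled with a value that should never appear in the results, so that entries
  ! not overwritten by the field restrictions below are easy to spot (presumably a
  ! debugging sentinel).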

  ! Set U to the coordinates [x,y,z] and Alpha to x^2+y^2+z^2 by writing into UAlpha, then restrict to U and Alpha
  PetscCallA(DMGetLocalSection(dmUA, sectionUA,ierr))
  PetscCallA(DMGetLocalVector(dmUA, UALoc,ierr))
  PetscCallA(VecGetArrayF90(UALoc, cval,ierr))
  PetscCallA(DMGetCoordinateSection(dmUA, coordSection,ierr))
  PetscCallA(DMGetCoordinatesLocal(dmUA, coord,ierr))
  PetscCallA(DMPlexGetChart(dmUA, pStart, pEnd,ierr))

  do p = pStart,pEnd-1
    PetscCallA(PetscSectionGetDof(sectionUA, p, dofUA,ierr))
    if (dofUA > 0) then
      PetscCallA(PetscSectionGetOffset(sectionUA, p, offUA,ierr))
      PetscCallA(DMPlexVecGetClosure(dmUA, coordSection, coord, p, xyz,ierr))
      closureSize = size(xyz)
      do i = 1,sdim
        do j = 0, closureSize-1,sdim
          cval(offUA+i) = cval(offUA+i) + xyz(j+i)
        end do
        cval(offUA+i) = cval(offUA+i) * sdim / closureSize
        cval(offUA+sdim+1) = cval(offUA+sdim+1) + cval(offUA+i)**2
      end do
      PetscCallA(DMPlexVecRestoreClosure(dmUA, coordSection, coord, p, xyz,ierr))
    end if
  end do

  PetscCallA(VecRestoreArrayF90(UALoc, cval,ierr))
  PetscCallA(DMLocalToGlobalBegin(dmUA, UALoc, INSERT_VALUES, UA,ierr))
  PetscCallA(DMLocalToGlobalEnd(dmUA, UALoc, INSERT_VALUES, UA,ierr))
  PetscCallA(DMRestoreLocalVector(dmUA, UALoc,ierr))

  ! Update X
  PetscCallA(VecISCopy(X, isUA, SCATTER_FORWARD, UA,ierr))
  ! Restrict to U and Alpha
  PetscCallA(VecISCopy(X, isU, SCATTER_REVERSE, U,ierr))
  PetscCallA(VecISCopy(X, isA, SCATTER_REVERSE, A,ierr))
  PetscCallA(VecViewFromOptions(UA, PETSC_NULL_OPTIONS, "-ua_vec_view",ierr))
  PetscCallA(VecViewFromOptions(U, PETSC_NULL_OPTIONS, "-u_vec_view",ierr))
  PetscCallA(VecViewFromOptions(A, PETSC_NULL_OPTIONS, "-a_vec_view",ierr))
  ! Restrict to UA2
  PetscCallA(VecISCopy(X, isUA, SCATTER_REVERSE, UA2,ierr))
  PetscCallA(VecViewFromOptions(UA2, PETSC_NULL_OPTIONS, "-ua2_vec_view",ierr))

  ! Write the nodal variables U and Alpha at step 0 (the natural ordering set up above is used for I/O)
  PetscCallA(DMSetOutputSequenceNumber(dmU, 0_kPI, time, ierr))
  PetscCallA(DMSetOutputSequenceNumber(dmA, 0_kPI, time, ierr))

  PetscCallA(VecView(U, viewer,ierr))
  PetscCallA(VecView(A, viewer,ierr))

  ! Save U and Alpha in one shot.
  ! For this, we need to cheat and change the Vec's name.
  ! Note that in the end we write variables one component at a time,
  ! so there is no real value in doing this.
  PetscCallA(DMSetOutputSequenceNumber(dmUA,1_kPI,time,ierr))
  PetscCallA(DMGetGlobalVector(dmUA, tmpVec,ierr))
  PetscCallA(VecCopy(UA, tmpVec,ierr))
  PetscCallA(PetscObjectSetName(tmpVec, "U",ierr))
  PetscCallA(VecView(tmpVec, viewer,ierr))

  ! Read the nodal variables back from the Exodus file
  PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
  PetscCallA(VecLoad(tmpVec, viewer,ierr))
  PetscCallA(VecAXPY(UA, -1.0_kPR, tmpVec,ierr))
  PetscCallA(VecNorm(UA, NORM_INFINITY, norm,ierr))
  if (norm > PETSC_SQRT_MACHINE_EPSILON) then
    write(IOBuffer,'("UAlpha ||Vin - Vout|| = ",ES12.5)') norm
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,IOBuffer)
  end if
  PetscCallA(DMRestoreGlobalVector(dmUA, tmpVec,ierr))

  ! Do the same thing with the UA2 Vec obtained from the super-DM
  PetscCallA(DMGetGlobalVector(dmUA2, tmpVec,ierr))
  PetscCallA(VecCopy(UA2, tmpVec,ierr))
  PetscCallA(PetscObjectSetName(tmpVec, "U",ierr))
  PetscCallA(DMSetOutputSequenceNumber(dmUA2,2_kPI,time,ierr))
  PetscCallA(VecView(tmpVec, viewer,ierr))

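  ! As above, check the write/read round trip, this time for the vector built on the
  ! super-DM dmUA2.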
  ! Read the nodal variables back from the Exodus file
  PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
  PetscCallA(VecLoad(tmpVec,viewer,ierr))
  PetscCallA(VecAXPY(UA2, -1.0_kPR, tmpVec,ierr))
  PetscCallA(VecNorm(UA2, NORM_INFINITY, norm,ierr))
  if (norm > PETSC_SQRT_MACHINE_EPSILON) then
    write(IOBuffer,'("UAlpha2 ||Vin - Vout|| = ",ES12.5)') norm
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,IOBuffer)
  end if
  PetscCallA(DMRestoreGlobalVector(dmUA2, tmpVec,ierr))

  ! Build and save Sigma.
  ! We set sigma_0 = rank (to see the partitioning)
  !        sigma_1 = cell set ID
  !        sigma_2 = x coordinate of the cell center of mass
  PetscCallA(DMGetCoordinateSection(dmS, coordSection,ierr))
  PetscCallA(DMGetCoordinatesLocal(dmS, coord,ierr))
  PetscCallA(DMGetLabelIdIS(dmS, "Cell Sets", csIS,ierr))
  PetscCallA(DMGetLabelSize(dmS, "Cell Sets", numCS,ierr))
  PetscCallA(ISGetIndicesF90(csIS, csID,ierr))

  do set = 1, numCS
    PetscCallA(DMGetStratumIS(dmS, "Cell Sets", csID(set), cellIS,ierr))
    PetscCallA(ISGetIndicesF90(cellIS, cellID,ierr))
    PetscCallA(ISGetSize(cellIS, numCells,ierr))
    do cell = 1,numCells
      PetscCallA(DMPlexVecGetClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval,ierr))
      PetscCallA(DMPlexVecGetClosure(dmS, coordSection, coord, cellID(cell), xyz,ierr))
      cval(1) = rank
      cval(2) = csID(set)
      cval(3) = 0.0_kPR
      do c = 1, size(xyz),sdim
        cval(3) = cval(3) + xyz(c)
      end do
      cval(3) = cval(3) * sdim / size(xyz)
      PetscCallA(DMPlexVecSetClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval, INSERT_ALL_VALUES,ierr))
      PetscCallA(DMPlexVecRestoreClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval,ierr))
      PetscCallA(DMPlexVecRestoreClosure(dmS, coordSection, coord, cellID(cell), xyz,ierr))
    end do
    PetscCallA(ISRestoreIndicesF90(cellIS, cellID,ierr))
    PetscCallA(ISDestroy(cellIS,ierr))
  end do
  PetscCallA(ISRestoreIndicesF90(csIS, csID,ierr))
  PetscCallA(ISDestroy(csIS,ierr))
  PetscCallA(VecViewFromOptions(S, PETSC_NULL_OPTIONS, "-s_vec_view",ierr))

  ! Write the zonal variables to the Exodus file
  PetscCallA(DMSetOutputSequenceNumber(dmS,0_kPI,time,ierr))
  PetscCallA(VecView(S,viewer,ierr))

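  ! In ExodusII terms, nodal variables live on vertices and zonal (element) variables live
  ! on cells; Sigma carries only cell dofs, so it is stored as zonal variables.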
  ! Read the zonal variables back from the Exodus file
  PetscCallA(DMGetGlobalVector(dmS, tmpVec,ierr))
  PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
  PetscCallA(PetscObjectSetName(tmpVec, "Sigma",ierr))
  PetscCallA(VecLoad(tmpVec,viewer,ierr))
  PetscCallA(VecAXPY(S, -1.0_kPR, tmpVec,ierr))
  PetscCallA(VecNorm(S, NORM_INFINITY, norm,ierr))
  if (norm > PETSC_SQRT_MACHINE_EPSILON) then
    write(IOBuffer,'("Sigma ||Vin - Vout|| = ",ES12.5)') norm
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_PLIB,IOBuffer)
  end if
  PetscCallA(DMRestoreGlobalVector(dmS, tmpVec,ierr))

  PetscCallA(DMRestoreGlobalVector(dmUA2, UA2,ierr))
  PetscCallA(DMRestoreGlobalVector(dmUA, UA,ierr))
  PetscCallA(DMRestoreGlobalVector(dmS, S,ierr))
  PetscCallA(DMRestoreGlobalVector(dmA, A,ierr))
  PetscCallA(DMRestoreGlobalVector(dmU, U,ierr))
  PetscCallA(DMRestoreGlobalVector(pdm, X,ierr))
  PetscCallA(DMDestroy(dmU,ierr))
  PetscCallA(ISDestroy(isU,ierr))
  PetscCallA(DMDestroy(dmA,ierr))
  PetscCallA(ISDestroy(isA,ierr))
  PetscCallA(DMDestroy(dmS,ierr))
  PetscCallA(ISDestroy(isS,ierr))
  PetscCallA(DMDestroy(dmUA,ierr))
  PetscCallA(ISDestroy(isUA,ierr))
  PetscCallA(DMDestroy(dmUA2,ierr))
  PetscCallA(DMDestroy(pdm,ierr))
  if (numProc > 1) then
    PetscCallA(DMDestroy(dm,ierr))
  end if

  deallocate(pStartDepth)
  deallocate(pEndDepth)

  PetscCallA(PetscViewerDestroy(viewer,ierr))
  PetscCallA(PetscFinalize(ierr))
end program ex62f90

! /*TEST
!
!   build:
!     requires: exodusii pnetcdf !complex
!   # 2D seq
!   test:
!     suffix: 0
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 1
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!
!   test:
!     suffix: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 3
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 4
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 5
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   # 2D par
!   test:
!     suffix: 6
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 7
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 8
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: invalid dimension ID or name
!   test:
!     suffix: 9
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 10
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     # Something is now broken with parallel read/write for whatever shape H is
!     TODO: broken
!     suffix: 11
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2

!   # 3D seq
!   test:
!     suffix: 12
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 13
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 14
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 15
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   # 3D par
!   test:
!     suffix: 16
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 17
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 18
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 19
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   #TODO: bug in call to NetCDF: failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints

! TEST*/