!
! ex62f90: exercises DMPlex <-> ExodusII I/O for vector, scalar and tensor fields.
!
! The mesh is read from a file given by -i, a PetscSection holding a vector field U
! (field 0), a tensor field Sigma (field 1) and a scalar field Alpha (field 2) is
! built on it, the fields are filled with known values, written to the ExodusII
! file given by -o, read back, and compared against what was written.
!
program ex62f90
#include "petsc/finclude/petsc.h"
  use petsc
  implicit none
#include "exodusII.inc"

  ! Get the Fortran kind associated with PetscInt and PetscReal so that we can use literal constants.
  PetscInt                           :: dummyPetscInt
  PetscReal                          :: dummyPetscReal
  integer,parameter                  :: kPI = kind(dummyPetscInt)
  integer,parameter                  :: kPR = kind(dummyPetscReal)

  type(tDM)                          :: dm,dmU,dmA,dmS,dmUA,dmUA2,pDM
  type(tDM),dimension(:),pointer     :: dmList
  type(tVec)                         :: X,U,A,S,UA,UA2
  type(tIS)                          :: isU,isA,isS,isUA
  type(tPetscSection)                :: section
  ! Field numbers inside the section: U is field 0, Sigma field 1, Alpha field 2
  PetscInt,dimension(1)              :: fieldU = [0]
  PetscInt,dimension(1)              :: fieldA = [2]
  PetscInt,dimension(1)              :: fieldS = [1]
  PetscInt,dimension(2)              :: fieldUA = [0,2]
  character(len=PETSC_MAX_PATH_LEN)  :: ifilename,ofilename,IOBuffer
  integer                            :: exoid = -1
  type(tIS)                          :: csIS
  PetscInt,dimension(:),pointer      :: csID
  PetscInt,dimension(:),pointer      :: pStartDepth,pEndDepth
  PetscInt                           :: order = 1
  PetscInt                           :: sdim,d,pStart,pEnd,p,numCS,set,i,j
  PetscMPIInt                        :: rank,numProc
  PetscBool                          :: flg
  PetscErrorCode                     :: ierr
  MPI_Comm                           :: comm
  type(tPetscViewer)                 :: viewer

  character(len=MXSTLN)              :: sJunk
  PetscInt                           :: numstep = 3, step
  PetscInt                           :: numNodalVar,numZonalVar
  character(len=MXSTLN)              :: nodalVarName(4)
  character(len=MXSTLN)              :: zonalVarName(6)
  logical,dimension(:,:),pointer     :: truthtable

  type(tIS)                          :: cellIS
  PetscInt,dimension(:),pointer      :: cellID
  PetscInt                           :: numCells, cell, closureSize
  PetscInt,dimension(:),pointer      :: closureA,closure

  type(tPetscSection)                :: sectionUA,coordSection
  type(tVec)                         :: UALoc,coord
  PetscScalar,dimension(:),pointer   :: cval,xyz
  PetscInt                           :: dofUA,offUA,c

  ! dof layout ordered by increasing height in the DAG: cell, face, edge, vertex
  PetscInt,dimension(3),target       :: dofS2D     = [0, 0, 3]
  PetscInt,dimension(3),target       :: dofUP1Tri  = [2, 0, 0]
  PetscInt,dimension(3),target       :: dofAP1Tri  = [1, 0, 0]
  PetscInt,dimension(3),target       :: dofUP2Tri  = [2, 2, 0]
  PetscInt,dimension(3),target       :: dofAP2Tri  = [1, 1, 0]
  PetscInt,dimension(3),target       :: dofUP1Quad = [2, 0, 0]
  PetscInt,dimension(3),target       :: dofAP1Quad = [1, 0, 0]
  PetscInt,dimension(3),target       :: dofUP2Quad = [2, 2, 2]
  PetscInt,dimension(3),target       :: dofAP2Quad = [1, 1, 1]
  PetscInt,dimension(4),target       :: dofS3D     = [0, 0, 0, 6]
  PetscInt,dimension(4),target       :: dofUP1Tet  = [3, 0, 0, 0]
  PetscInt,dimension(4),target       :: dofAP1Tet  = [1, 0, 0, 0]
  PetscInt,dimension(4),target       :: dofUP2Tet  = [3, 3, 0, 0]
  PetscInt,dimension(4),target       :: dofAP2Tet  = [1, 1, 0, 0]
  PetscInt,dimension(4),target       :: dofUP1Hex  = [3, 0, 0, 0]
  PetscInt,dimension(4),target       :: dofAP1Hex  = [1, 0, 0, 0]
  PetscInt,dimension(4),target       :: dofUP2Hex  = [3, 3, 3, 3]
  PetscInt,dimension(4),target       :: dofAP2Hex  = [1, 1, 1, 1]
  PetscInt,dimension(:),pointer      :: dofU,dofA,dofS

  type(tPetscSF)                     :: migrationSF
  PetscPartitioner                   :: part
  PetscLayout                        :: layout

  type(tVec)                         :: tmpVec
  PetscReal                          :: norm
  PetscReal                          :: time = 1.234_kPR

  PetscCallA(PetscInitialize(PETSC_NULL_CHARACTER,ierr))
  if (ierr /= 0) then
    print*,'Unable to initialize PETSc'
    stop
  endif

  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr))
  PetscCallMPIA(MPI_Comm_size(PETSC_COMM_WORLD,numProc,ierr))
  PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-i',ifilename,flg,ierr))
  PetscCheckA(flg,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,'missing input file name -i <input file name>')
  PetscCallA(PetscOptionsGetString(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-o',ofilename,flg,ierr))
  PetscCheckA(flg,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,'missing output file name -o <output file name>')
  PetscCallA(PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-order',order,flg,ierr))
  if ((order > 2) .or. (order < 1)) then
    write(IOBuffer,'("Unsupported polynomial order ", I2, " not in [1,2]")') order
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
  end if

  ! Read the mesh in any supported format
  PetscCallA(DMPlexCreateFromFile(PETSC_COMM_WORLD, ifilename,PETSC_NULL_CHARACTER,PETSC_TRUE,dm,ierr))
  PetscCallA(DMPlexDistributeSetDefault(dm,PETSC_FALSE,ierr))
  PetscCallA(DMSetFromOptions(dm,ierr))
  PetscCallA(DMGetDimension(dm, sdim,ierr))
  PetscCallA(DMViewFromOptions(dm, PETSC_NULL_OPTIONS,'-dm_view',ierr))

  ! Create the exodus result file

  ! enable exodus debugging information
  PetscCallA(exopts(EXVRBS+EXDEBG,ierr))
  ! Create the exodus file
  PetscCallA(PetscViewerExodusIIOpen(PETSC_COMM_WORLD,ofilename,FILE_MODE_WRITE,viewer,ierr))
  ! The long way would be
  !
  ! PetscCallA(PetscViewerCreate(PETSC_COMM_WORLD,viewer,ierr))
  ! PetscCallA(PetscViewerSetType(viewer,PETSCVIEWEREXODUSII,ierr))
  ! PetscCallA(PetscViewerFileSetMode(viewer,FILE_MODE_WRITE,ierr))
  ! PetscCallA(PetscViewerFileSetName(viewer,ofilename,ierr))

  ! set the mesh order
  PetscCallA(PetscViewerExodusIISetOrder(viewer,order,ierr))
  PetscCallA(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD,ierr))
  !
  ! Notice how the exodus file is actually NOT open at this point (exoid is -1)
  ! Since we are overwriting the file (mode is FILE_MODE_WRITE), we are going to have to
  ! write the geometry (the DM), which can only be done on a brand new file.
  !

  ! Save the geometry to the file, erasing all previous content
  PetscCallA(DMView(dm,viewer,ierr))
  PetscCallA(PetscViewerView(viewer,PETSC_VIEWER_STDOUT_WORLD,ierr))
  !
  ! Note how the exodus file is now open
  !
  ! 'Format' the exodus result file, i.e. allocate space for nodal and zonal variables
  select case(sdim)
  case(2)
    numNodalVar = 3
    nodalVarName(1:numNodalVar) = ['U_x  ','U_y  ','Alpha']
    numZonalVar = 3
    zonalVarName(1:numZonalVar) = ['Sigma_11','Sigma_22','Sigma_12']
  case(3)
    numNodalVar = 4
    nodalVarName(1:numNodalVar) = ['U_x  ','U_y  ','U_z  ','Alpha']
    numZonalVar = 6
    zonalVarName(1:numZonalVar) = ['Sigma_11','Sigma_22','Sigma_33','Sigma_23','Sigma_13','Sigma_12']
  case default
    ! Raise the error instead of silently continuing with numNodalVar/numZonalVar undefined
    write(IOBuffer,'("No layout for dimension ",I2)') sdim
    SETERRA(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
  end select
  PetscCallA(PetscViewerExodusIIGetId(viewer,exoid,ierr))
  PetscCallA(expvp(exoid, 'E', numZonalVar,ierr))
  PetscCallA(expvan(exoid, 'E', numZonalVar, zonalVarName,ierr))
  PetscCallA(expvp(exoid, 'N', numNodalVar,ierr))
  PetscCallA(expvan(exoid, 'N', numNodalVar, nodalVarName,ierr))
  PetscCallA(exinq(exoid, EX_INQ_ELEM_BLK,numCS,PETSC_NULL_REAL,sJunk,ierr))

  ! An exodusII truth table specifies which fields are saved at which time step
  ! It speeds up I/O by reserving space for fields in the file ahead of time.
  allocate(truthtable(numCS,numZonalVar))
  truthtable = .true.
  PetscCallA(expvtt(exoid, numCS, numZonalVar, truthtable, ierr))
  deallocate(truthtable)

  ! Writing time step information in the file.
  ! Note that this is currently broken in the exodus library for netcdf4 (HDF5-based) files
  do step = 1,numstep
    PetscCallA(exptim(exoid,step,real(step,kind=kPR),ierr))
  end do

  ! Build the section describing the layout of U, Alpha and Sigma on the mesh
  PetscCallA(PetscObjectGetComm(dm,comm,ierr))
  PetscCallA(PetscSectionCreate(comm, section,ierr))
  PetscCallA(PetscSectionSetNumFields(section, 3_kPI,ierr))
  PetscCallA(PetscSectionSetFieldName(section, fieldU, 'U',ierr))
  PetscCallA(PetscSectionSetFieldName(section, fieldA, 'Alpha',ierr))
  PetscCallA(PetscSectionSetFieldName(section, fieldS, 'Sigma',ierr))
  PetscCallA(DMPlexGetChart(dm, pStart, pEnd,ierr))
  PetscCallA(PetscSectionSetChart(section, pStart, pEnd,ierr))

  ! Start/end of the points of depth d-1 (d = 1 are vertices, d = sdim+1 are cells)
  allocate(pStartDepth(sdim+1))
  allocate(pEndDepth(sdim+1))
  do d = 1, sdim+1
    PetscCallA(DMPlexGetDepthStratum(dm, d-1, pStartDepth(d), pEndDepth(d),ierr))
  end do

  ! Vector field U, Scalar field Alpha, Tensor field Sigma
  PetscCallA(PetscSectionSetFieldComponents(section, fieldU, sdim,ierr))
  PetscCallA(PetscSectionSetFieldComponents(section, fieldA, 1_kPI,ierr))
  PetscCallA(PetscSectionSetFieldComponents(section, fieldS, sdim*(sdim+1)/2,ierr))

  ! Going through cell sets then cells, and setting up storage for the sections
  PetscCallA(DMGetLabelSize(dm, 'Cell Sets', numCS, ierr))
  PetscCallA(DMGetLabelIdIS(dm, 'Cell Sets', csIS, ierr))
  PetscCallA(ISGetIndicesF90(csIS, csID, ierr))
  do set = 1,numCS
    ! This is pointless but will avoid a warning with gfortran and -Werror=maybe-uninitialized
    nullify(dofA)
    nullify(dofU)
    nullify(dofS)
    PetscCallA(DMGetStratumSize(dm, 'Cell Sets', csID(set), numCells,ierr))
    PetscCallA(DMGetStratumIS(dm, 'Cell Sets', csID(set), cellIS,ierr))
    if (numCells > 0) then
      ! The Sigma layout depends only on the spatial dimension
      select case(sdim)
      case(2)
        dofS => dofS2D
      case(3)
        dofS => dofS3D
      case default
        write(IOBuffer,'("No layout for dimension ",I2)') sdim
        SETERRA(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE,IOBuffer)
      end select ! sdim

      ! Identify cell type based on closure size only. This works for Tri/Tet/Quad/Hex meshes
      ! It will not be enough to identify more exotic elements like pyramid or prisms...
      PetscCallA(ISGetIndicesF90(cellIS, cellID,ierr))
      nullify(closureA)
      PetscCallA(DMPlexGetTransitiveClosure(dm,cellID(1), PETSC_TRUE, closureA,ierr))
      select case(size(closureA)/2)
      case(7) ! Tri
        if (order == 1) then
          dofU => dofUP1Tri
          dofA => dofAP1Tri
        else
          dofU => dofUP2Tri
          dofA => dofAP2Tri
        end if
      case(9) ! Quad
        if (order == 1) then
          dofU => dofUP1Quad
          dofA => dofAP1Quad
        else
          dofU => dofUP2Quad
          dofA => dofAP2Quad
        end if
      case(15) ! Tet
        if (order == 1) then
          dofU => dofUP1Tet
          dofA => dofAP1Tet
        else
          dofU => dofUP2Tet
          dofA => dofAP2Tet
        end if
      case(27) ! Hex
        if (order == 1) then
          dofU => dofUP1Hex
          dofA => dofAP1Hex
        else
          dofU => dofUP2Hex
          dofA => dofAP2Hex
        end if
      case default
        write(IOBuffer,'("Unknown element with closure size ",I2)') size(closureA)/2
        SETERRA(PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP,IOBuffer)
      end select
      PetscCallA(DMPlexRestoreTransitiveClosure(dm, cellID(1), PETSC_TRUE,closureA,ierr))

      do cell = 1,numCells
        nullify(closure)
        PetscCallA(DMPlexGetTransitiveClosure(dm, cellID(cell), PETSC_TRUE, closure,ierr))
        ! closure holds (point, orientation) pairs, hence the stride of 2
        do p = 1,size(closure),2
          ! find the depth of point closure(p) and set its dof count accordingly
          do d = 1,sdim+1
            if ((closure(p) >= pStartDepth(d)) .and. (closure(p) < pEndDepth(d))) then
              PetscCallA(PetscSectionSetDof(section, closure(p), dofU(d)+dofA(d)+dofS(d),ierr))
              PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldU, dofU(d),ierr))
              PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldA, dofA(d),ierr))
              PetscCallA(PetscSectionSetFieldDof(section, closure(p), fieldS, dofS(d),ierr))
            end if ! closure(p)
          end do ! d
        end do ! p
        PetscCallA(DMPlexRestoreTransitiveClosure(dm, cellID(cell), PETSC_TRUE, closure,ierr))
      end do ! cell
      PetscCallA(ISRestoreIndicesF90(cellIS, cellID,ierr))
      PetscCallA(ISDestroy(cellIS,ierr))
    end if ! numCells
  end do ! set
  PetscCallA(ISRestoreIndicesF90(csIS, csID,ierr))
  PetscCallA(ISDestroy(csIS,ierr))
  PetscCallA(PetscSectionSetUp(section,ierr))
  PetscCallA(DMSetLocalSection(dm, section,ierr))
  PetscCallA(PetscObjectViewFromOptions(section, PETSC_NULL_SECTION, '-dm_section_view',ierr))
  PetscCallA(PetscSectionGetValueLayout(PETSC_COMM_WORLD, section, layout, ierr))
  PetscCallA(PetscLayoutDestroy(layout,ierr))
  PetscCallA(PetscSectionDestroy(section,ierr))

  ! Distribute the mesh; keep the migration SF so that Vecs can be written in natural ordering
  PetscCallA(DMSetUseNatural(dm,PETSC_TRUE,ierr))
  PetscCallA(DMPlexGetPartitioner(dm,part,ierr))
  PetscCallA(PetscPartitionerSetFromOptions(part,ierr))
  PetscCallA(DMPlexDistribute(dm,0_kPI,migrationSF,pDM,ierr))

  if (numProc > 1) then
    PetscCallA(DMPlexSetMigrationSF(pDM,migrationSF,ierr))
    PetscCallA(PetscSFDestroy(migrationSF,ierr))
  else
    ! On a single process DMPlexDistribute leaves pDM unset; alias the serial DM
    pDM = dm
  end if
  PetscCallA(DMViewFromOptions(pDM,PETSC_NULL_OPTIONS,'-dm_view',ierr))

  ! Get DM and IS for each field of dm
  PetscCallA(DMCreateSubDM(pDM, 1_kPI, fieldU, isU, dmU,ierr))
  PetscCallA(DMCreateSubDM(pDM, 1_kPI, fieldA, isA, dmA,ierr))
  PetscCallA(DMCreateSubDM(pDM, 1_kPI, fieldS, isS, dmS,ierr))
  PetscCallA(DMCreateSubDM(pDM, 2_kPI, fieldUA, isUA, dmUA,ierr))

  ! Create a second (U,Alpha) DM via DMCreateSuperDM to test that code path too
  allocate(dmList(2))
  dmList(1) = dmU
  dmList(2) = dmA
  PetscCallA(DMCreateSuperDM(dmList,2_kPI,PETSC_NULL_IS,dmUA2,ierr))
  deallocate(dmList)

  PetscCallA(DMGetGlobalVector(pDM, X,ierr))
  PetscCallA(DMGetGlobalVector(dmU, U,ierr))
  PetscCallA(DMGetGlobalVector(dmA, A,ierr))
  PetscCallA(DMGetGlobalVector(dmS, S,ierr))
  PetscCallA(DMGetGlobalVector(dmUA, UA,ierr))
  PetscCallA(DMGetGlobalVector(dmUA2, UA2,ierr))

  PetscCallA(PetscObjectSetName(U, 'U',ierr))
  PetscCallA(PetscObjectSetName(A, 'Alpha',ierr))
  PetscCallA(PetscObjectSetName(S, 'Sigma',ierr))
  PetscCallA(PetscObjectSetName(UA, 'UAlpha',ierr))
  PetscCallA(PetscObjectSetName(UA2, 'UAlpha2',ierr))
  ! Sentinel value: any entry of X never written below will stand out
  PetscCallA(VecSet(X, -111.0_kPR,ierr))

  ! Setting u to [x,y,z] and alpha to x^2+y^2+z^2 by writing in UAlpha then restricting to U and Alpha
  PetscCallA(DMGetLocalSection(dmUA, sectionUA,ierr))
  PetscCallA(DMGetLocalVector(dmUA, UALoc,ierr))
  PetscCallA(VecGetArrayF90(UALoc, cval,ierr))
  PetscCallA(DMGetCoordinateSection(dmUA, coordSection,ierr))
  PetscCallA(DMGetCoordinatesLocal(dmUA, coord,ierr))
  PetscCallA(DMPlexGetChart(dmUA, pStart, pEnd,ierr))

  do p = pStart,pEnd-1
    PetscCallA(PetscSectionGetDof(sectionUA, p, dofUA,ierr))
    if (dofUA > 0) then
      PetscCallA(PetscSectionGetOffset(sectionUA, p, offUA,ierr))
      ! xyz holds the coordinates of all vertices in the closure of p, sdim entries per vertex
      PetscCallA(DMPlexVecGetClosure(dmUA, coordSection, coord, p, xyz,ierr))
      closureSize = size(xyz)
      do i = 1,sdim
        do j = 0, closureSize-1,sdim
          ! NOTE(review): xyz(j/sdim+i) looks inconsistent with the stride-sdim loop
          ! (one would expect xyz(j+i) to pick component i of each vertex) -- confirm
          ! against the coordinate closure layout before changing.
          cval(offUA+i) = cval(offUA+i) + xyz(j/sdim+i)
        end do
        ! average of coordinate component i over the vertices of the closure
        cval(offUA+i) = cval(offUA+i) * sdim / closureSize
        ! alpha = |u|^2 accumulated component by component
        cval(offUA+sdim+1) = cval(offUA+sdim+1) + cval(offUA+i)**2
      end do
      PetscCallA(DMPlexVecRestoreClosure(dmUA, coordSection, coord, p, xyz,ierr))
    end if
  end do

  PetscCallA(VecRestoreArrayF90(UALoc, cval,ierr))
  PetscCallA(DMLocalToGlobalBegin(dmUA, UALoc, INSERT_VALUES, UA,ierr))
  PetscCallA(DMLocalToGlobalEnd(dmUA, UALoc, INSERT_VALUES, UA,ierr))
  PetscCallA(DMRestoreLocalVector(dmUA, UALoc,ierr))

  ! Update X
  PetscCallA(VecISCopy(X, isUA, SCATTER_FORWARD, UA,ierr))
  ! Restrict to U and Alpha
  PetscCallA(VecISCopy(X, isU, SCATTER_REVERSE, U,ierr))
  PetscCallA(VecISCopy(X, isA, SCATTER_REVERSE, A,ierr))
  PetscCallA(VecViewFromOptions(UA, PETSC_NULL_OPTIONS, '-ua_vec_view',ierr))
  PetscCallA(VecViewFromOptions(U, PETSC_NULL_OPTIONS, '-u_vec_view',ierr))
  PetscCallA(VecViewFromOptions(A, PETSC_NULL_OPTIONS, '-a_vec_view',ierr))
  ! restrict to UA2
  PetscCallA(VecISCopy(X, isUA, SCATTER_REVERSE, UA2,ierr))
  PetscCallA(VecViewFromOptions(UA2, PETSC_NULL_OPTIONS, '-ua2_vec_view',ierr))

  ! Getting Natural Vec
  PetscCallA(DMSetOutputSequenceNumber(dmU, 0_kPI, time, ierr))
  PetscCallA(DMSetOutputSequenceNumber(dmA, 0_kPI, time, ierr))

  PetscCallA(VecView(U, viewer,ierr))
  PetscCallA(VecView(A, viewer,ierr))

  ! Saving U and Alpha in one shot.
  ! For this, we need to cheat and change the Vec's name
  ! Note that in the end we write variables one component at a time,
  ! so that there is no real value in doing this
  PetscCallA(DMSetOutputSequenceNumber(dmUA,1_kPI,time,ierr))
  PetscCallA(DMGetGlobalVector(dmUA, tmpVec,ierr))
  PetscCallA(VecCopy(UA, tmpVec,ierr))
  PetscCallA(PetscObjectSetName(tmpVec, 'U',ierr))
  PetscCallA(VecView(tmpVec, viewer,ierr))

  ! Reading nodal variables in Exodus file and checking the round trip
  PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
  PetscCallA(VecLoad(tmpVec, viewer,ierr))
  PetscCallA(VecAXPY(UA, -1.0_kPR, tmpVec,ierr))
  PetscCallA(VecNorm(UA, NORM_INFINITY, norm,ierr))
  if (norm > PETSC_SQRT_MACHINE_EPSILON) then
    write(IOBuffer,'("UAlpha ||Vin - Vout|| = ",ES12.5)') norm
    ! the message must actually be raised, otherwise the check is dead code
    SETERRA(PETSC_COMM_SELF,PETSC_ERR_PLIB,IOBuffer)
  end if
  PetscCallA(DMRestoreGlobalVector(dmUA, tmpVec,ierr))

  ! same thing with the UA2 Vec obtained from the superDM
  PetscCallA(DMGetGlobalVector(dmUA2, tmpVec,ierr))
  PetscCallA(VecCopy(UA2, tmpVec,ierr))
  PetscCallA(PetscObjectSetName(tmpVec, 'U',ierr))
  PetscCallA(DMSetOutputSequenceNumber(dmUA2,2_kPI,time,ierr))
  PetscCallA(VecView(tmpVec, viewer,ierr))

  ! Reading nodal variables in Exodus file and checking the round trip
  PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
  PetscCallA(VecLoad(tmpVec,viewer,ierr))
  PetscCallA(VecAXPY(UA2, -1.0_kPR, tmpVec,ierr))
  PetscCallA(VecNorm(UA2, NORM_INFINITY, norm,ierr))
  if (norm > PETSC_SQRT_MACHINE_EPSILON) then
    write(IOBuffer,'("UAlpha2 ||Vin - Vout|| = ",ES12.5)') norm
    SETERRA(PETSC_COMM_SELF,PETSC_ERR_PLIB,IOBuffer)
  end if
  PetscCallA(DMRestoreGlobalVector(dmUA2, tmpVec,ierr))

  ! Building and saving Sigma
  ! We set sigma_0 = rank (to see partitioning)
  !        sigma_1 = cell set ID
  !        sigma_2 = x_coordinate of the cell center of mass
  PetscCallA(DMGetCoordinateSection(dmS, coordSection,ierr))
  PetscCallA(DMGetCoordinatesLocal(dmS, coord,ierr))
  PetscCallA(DMGetLabelIdIS(dmS, 'Cell Sets', csIS,ierr))
  PetscCallA(DMGetLabelSize(dmS, 'Cell Sets',numCS,ierr))
  PetscCallA(ISGetIndicesF90(csIS, csID,ierr))

  do set = 1, numCS
    PetscCallA(DMGetStratumIS(dmS, 'Cell Sets', csID(set), cellIS,ierr))
    PetscCallA(ISGetIndicesF90(cellIS, cellID,ierr))
    PetscCallA(ISGetSize(cellIS, numCells,ierr))
    do cell = 1,numCells
      PetscCallA(DMPlexVecGetClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval,ierr))
      PetscCallA(DMPlexVecGetClosure(dmS, coordSection, coord, cellID(cell), xyz,ierr))
      cval(1) = rank
      cval(2) = csID(set)
      cval(3) = 0.0_kPR
      ! sum the x-coordinate (first component, stride sdim) of all closure vertices
      do c = 1, size(xyz),sdim
        cval(3) = cval(3) + xyz(c)
      end do
      cval(3) = cval(3) * sdim / size(xyz)
      PetscCallA(DMPlexVecSetClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval, INSERT_ALL_VALUES,ierr))
      PetscCallA(DMPlexVecRestoreClosure(dmS, PETSC_NULL_SECTION, S, cellID(cell), cval,ierr))
      PetscCallA(DMPlexVecRestoreClosure(dmS, coordSection, coord, cellID(cell), xyz,ierr))
    end do
    PetscCallA(ISRestoreIndicesF90(cellIS, cellID,ierr))
    PetscCallA(ISDestroy(cellIS,ierr))
  end do
  PetscCallA(ISRestoreIndicesF90(csIS, csID,ierr))
  PetscCallA(ISDestroy(csIS,ierr))
  PetscCallA(VecViewFromOptions(S, PETSC_NULL_OPTIONS, '-s_vec_view',ierr))

  ! Writing zonal variables in Exodus file
  PetscCallA(DMSetOutputSequenceNumber(dmS,0_kPI,time,ierr))
  PetscCallA(VecView(S,viewer,ierr))

  ! Reading zonal variables in Exodus file and checking the round trip
  PetscCallA(DMGetGlobalVector(dmS, tmpVec,ierr))
  PetscCallA(VecSet(tmpVec, -1000.0_kPR,ierr))
  PetscCallA(PetscObjectSetName(tmpVec, 'Sigma',ierr))
  PetscCallA(VecLoad(tmpVec,viewer,ierr))
  PetscCallA(VecAXPY(S, -1.0_kPR, tmpVec,ierr))
  PetscCallA(VecNorm(S, NORM_INFINITY,norm,ierr))
  if (norm > PETSC_SQRT_MACHINE_EPSILON) then
    write(IOBuffer,'("Sigma ||Vin - Vout|| = ",ES12.5)') norm
    SETERRA(PETSC_COMM_SELF,PETSC_ERR_PLIB,IOBuffer)
  end if
  PetscCallA(DMRestoreGlobalVector(dmS, tmpVec,ierr))

  ! Cleanup: restore vectors before destroying their DMs
  PetscCallA(DMRestoreGlobalVector(dmUA2, UA2,ierr))
  PetscCallA(DMRestoreGlobalVector(dmUA, UA,ierr))
  PetscCallA(DMRestoreGlobalVector(dmS, S,ierr))
  PetscCallA(DMRestoreGlobalVector(dmA, A,ierr))
  PetscCallA(DMRestoreGlobalVector(dmU, U,ierr))
  PetscCallA(DMRestoreGlobalVector(pDM, X,ierr))
  PetscCallA(DMDestroy(dmU,ierr))
  PetscCallA(ISDestroy(isU,ierr))
  PetscCallA(DMDestroy(dmA,ierr))
  PetscCallA(ISDestroy(isA,ierr))
  PetscCallA(DMDestroy(dmS,ierr))
  PetscCallA(ISDestroy(isS,ierr))
  PetscCallA(DMDestroy(dmUA,ierr))
  PetscCallA(ISDestroy(isUA,ierr))
  PetscCallA(DMDestroy(dmUA2,ierr))
  PetscCallA(DMDestroy(pDM,ierr))
  ! when running on one process pDM aliases dm, so dm is already destroyed
  if (numProc > 1) then
    PetscCallA(DMDestroy(dm,ierr))
  end if

  deallocate(pStartDepth)
  deallocate(pEndDepth)

  PetscCallA(PetscViewerDestroy(viewer,ierr))
  PetscCallA(PetscFinalize(ierr))
end program ex62f90

! /*TEST
!
!   build:
!     requires: exodusii pnetcdf !complex
!   # 2D seq
!   test:
!     suffix: 0
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 1
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!
!   test:
!     suffix: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 3
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 4
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 5
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   # 2D par
!   test:
!     suffix: 6
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 7
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 8
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: invalid dimension ID or name
!   test:
!     suffix: 9
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareT-large.exo -o FourSquareT-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 10
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareQ-large.exo -o FourSquareQ-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     # Something is now broken with parallel read/write for whatever shape H is
!     TODO: broken
!     suffix: 11
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourSquareH-large.exo -o FourSquareH-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2

!   #3d seq
!   test:
!     suffix: 12
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 13
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 14
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 15
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   #3d par
!   test:
!     suffix: 16
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   test:
!     suffix: 17
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 1
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints
!   test:
!     suffix: 18
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickHex-large.exo -o FourBrickHex-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   test:
!     suffix: 19
!     nsize: 2
!     args: -i ${wPETSC_DIR}/share/petsc/datafiles/meshes/FourBrickTet-large.exo -o FourBrickTet-large_out.exo -dm_view -dm_section_view -petscpartitioner_type simple -order 2
!   #TODO: bug in PetscCallA(to NetCDF failed to complete invalid type definition in file id 65536 NetCDF: One or more variable sizes violate format constraints

! TEST*/