// Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
// the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
// reserved. See files LICENSE and NOTICE for details.
//
// This file is part of CEED, a collection of benchmarks, miniapps, software
// libraries and APIs for efficient high-order finite element and spectral
// element discretizations for exascale applications. For more information and
// source code availability see http://github.com/ceed.
//
// The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
// a collaborative effort of two U.S. Department of Energy organizations (Office
// of Science and the National Nuclear Security Administration) responsible for
// the planning and preparation of a capable exascale ecosystem, including
// software, applications, hardware, advanced system engineering and early
// testbed platforms, in support of the nation's exascale computing imperative.

// libCEED + PETSc Example: CEED BPs
//
// This example demonstrates a simple usage of libCEED with PETSc to solve the
// CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
//
// The code uses higher level communication protocols in DMPlex.
//
// Build with:
//
//     make bps [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
//
// Sample runs:
//
//     ./bps -problem bp1 -degree 3
//     ./bps -problem bp2 -ceed /cpu/self -degree 3
//     ./bps -problem bp3 -ceed /gpu/occa -degree 3
//     ./bps -problem bp4 -ceed /cpu/occa -degree 3
//     ./bps -problem bp5 -ceed /omp/occa -degree 3
//     ./bps -problem bp6 -ceed /ocl/occa -degree 3
//
//TESTARGS -ceed {ceed_resource} -test -problem bp5 -degree 3

/// @file
/// CEED BPs example using PETSc with DMPlex
/// See bpsraw.c for a "raw" implementation using a structured grid.
42 const char help[] = "Solve CEED BPs using PETSc with DMPlex\n"; 43 44 #include <stdbool.h> 45 #include <string.h> 46 #include <petscksp.h> 47 #include <petscdmplex.h> 48 #include <ceed.h> 49 #include "setup.h" 50 51 // ----------------------------------------------------------------------------- 52 // Utilities 53 // ----------------------------------------------------------------------------- 54 55 // Utility function, compute three factors of an integer 56 static void Split3(PetscInt size, PetscInt m[3], bool reverse) { 57 for (PetscInt d=0,sizeleft=size; d<3; d++) { 58 PetscInt try = (PetscInt)PetscCeilReal(PetscPowReal(sizeleft, 1./(3 - d))); 59 while (try * (sizeleft / try) != sizeleft) try++; 60 m[reverse ? 2-d : d] = try; 61 sizeleft /= try; 62 } 63 } 64 65 static int Max3(const PetscInt a[3]) { 66 return PetscMax(a[0], PetscMax(a[1], a[2])); 67 } 68 69 static int Min3(const PetscInt a[3]) { 70 return PetscMin(a[0], PetscMin(a[1], a[2])); 71 } 72 73 int main(int argc, char **argv) { 74 PetscInt ierr; 75 MPI_Comm comm; 76 char filename[PETSC_MAX_PATH_LEN], 77 ceedresource[PETSC_MAX_PATH_LEN] = "/cpu/self"; 78 double my_rt_start, my_rt, rt_min, rt_max; 79 PetscInt degree = 3, qextra, lsize, gsize, dim = 3, melem[3] = {3, 3, 3}, 80 ncompu = 1, xlsize, localnodes, lelem; 81 PetscScalar *r; 82 PetscBool test_mode, benchmark_mode, read_mesh, write_solution, 83 userlnodes = PETSC_FALSE; 84 PetscLogStage solvestage; 85 Vec X, Xloc, rhs, rhsloc; 86 Mat matO; 87 KSP ksp; 88 DM dm; 89 UserO userO; 90 Ceed ceed; 91 CeedData ceeddata; 92 CeedQFunction qferror; 93 CeedOperator operror; 94 CeedVector rhsceed, target; 95 CeedMemType memtyperequested; 96 bpType bpchoice; 97 98 // Check PETSc CUDA support 99 PetscBool petschavecuda, setmemtyperequest = PETSC_FALSE; 100 // *INDENT-OFF* 101 #ifdef PETSC_HAVE_CUDA 102 petschavecuda = PETSC_TRUE; 103 #else 104 petschavecuda = PETSC_FALSE; 105 #endif 106 // *INDENT-ON* 107 108 ierr = PetscInitialize(&argc, &argv, NULL, help); 
109 if (ierr) return ierr; 110 comm = PETSC_COMM_WORLD; 111 112 // Read command line options 113 ierr = PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL); CHKERRQ(ierr); 114 bpchoice = CEED_BP1; 115 ierr = PetscOptionsEnum("-problem", 116 "CEED benchmark problem to solve", NULL, 117 bpTypes, (PetscEnum)bpchoice, (PetscEnum *)&bpchoice, 118 NULL); CHKERRQ(ierr); 119 ncompu = bpOptions[bpchoice].ncompu; 120 test_mode = PETSC_FALSE; 121 ierr = PetscOptionsBool("-test", 122 "Testing mode (do not print unless error is large)", 123 NULL, test_mode, &test_mode, NULL); CHKERRQ(ierr); 124 benchmark_mode = PETSC_FALSE; 125 ierr = PetscOptionsBool("-benchmark", 126 "Benchmarking mode (prints benchmark statistics)", 127 NULL, benchmark_mode, &benchmark_mode, NULL); 128 CHKERRQ(ierr); 129 write_solution = PETSC_FALSE; 130 ierr = PetscOptionsBool("-write_solution", 131 "Write solution for visualization", 132 NULL, write_solution, &write_solution, NULL); 133 CHKERRQ(ierr); 134 degree = test_mode ? 3 : 2; 135 ierr = PetscOptionsInt("-degree", "Polynomial degree of tensor product basis", 136 NULL, degree, °ree, NULL); CHKERRQ(ierr); 137 qextra = bpOptions[bpchoice].qextra; 138 ierr = PetscOptionsInt("-qextra", "Number of extra quadrature points", 139 NULL, qextra, &qextra, NULL); CHKERRQ(ierr); 140 ierr = PetscOptionsString("-ceed", "CEED resource specifier", 141 NULL, ceedresource, ceedresource, 142 sizeof(ceedresource), NULL); CHKERRQ(ierr); 143 read_mesh = PETSC_FALSE; 144 ierr = PetscOptionsString("-mesh", "Read mesh from file", NULL, 145 filename, filename, sizeof(filename), &read_mesh); 146 CHKERRQ(ierr); 147 if (!read_mesh) { 148 PetscInt tmp = dim; 149 ierr = PetscOptionsIntArray("-cells", "Number of cells per dimension", NULL, 150 melem, &tmp, NULL); CHKERRQ(ierr); 151 } 152 memtyperequested = petschavecuda ? 
CEED_MEM_DEVICE : CEED_MEM_HOST; 153 ierr = PetscOptionsEnum("-memtype", 154 "CEED MemType requested", NULL, 155 memTypes, (PetscEnum)memtyperequested, 156 (PetscEnum *)&memtyperequested, &setmemtyperequest); 157 CHKERRQ(ierr); 158 localnodes = 1000; 159 ierr = PetscOptionsInt("-local_nodes", 160 "Target number of locally owned nodes per process", 161 NULL, localnodes, &localnodes, &userlnodes); 162 CHKERRQ(ierr); 163 164 ierr = PetscOptionsEnd(); CHKERRQ(ierr); 165 166 // Setup DM 167 if (read_mesh) { 168 ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, filename, PETSC_TRUE, &dm); 169 CHKERRQ(ierr); 170 } else { 171 if (userlnodes) { 172 // Find a nicely composite number of elements no less than lnodes 173 for (lelem = PetscMax(1, localnodes / (degree*degree*degree)); ; 174 lelem++) { 175 Split3(lelem, melem, true); 176 if (Max3(melem) / Min3(melem) <= 2) break; 177 } 178 } else { 179 lelem = melem[0]*melem[1]*melem[2]; 180 } 181 ierr = DMPlexCreateBoxMesh(PETSC_COMM_WORLD, dim, PETSC_FALSE, melem, NULL, 182 NULL, NULL, PETSC_TRUE, &dm); CHKERRQ(ierr); 183 } 184 185 { 186 DM dmDist = NULL; 187 PetscPartitioner part; 188 189 ierr = DMPlexGetPartitioner(dm, &part); CHKERRQ(ierr); 190 ierr = PetscPartitionerSetFromOptions(part); CHKERRQ(ierr); 191 ierr = DMPlexDistribute(dm, 0, NULL, &dmDist); CHKERRQ(ierr); 192 if (dmDist) { 193 ierr = DMDestroy(&dm); CHKERRQ(ierr); 194 dm = dmDist; 195 } 196 } 197 198 // Set up libCEED 199 CeedInit(ceedresource, &ceed); 200 CeedMemType memtypebackend; 201 CeedGetPreferredMemType(ceed, &memtypebackend); 202 203 // Check memtype compatibility 204 if (!setmemtyperequest) 205 memtyperequested = memtypebackend; 206 else if (!petschavecuda && memtyperequested == CEED_MEM_DEVICE) 207 SETERRQ1(PETSC_COMM_WORLD, PETSC_ERR_SUP_SYS, 208 "PETSc was not built with CUDA. 
" 209 "Requested MemType CEED_MEM_DEVICE is not supported.", NULL); 210 211 // Create DM 212 ierr = SetupDMByDegree(dm, degree, ncompu, bpchoice); 213 CHKERRQ(ierr); 214 215 // Create vectors 216 if (memtyperequested == CEED_MEM_DEVICE) { 217 ierr = DMSetVecType(dm, VECCUDA); CHKERRQ(ierr); 218 } 219 ierr = DMCreateGlobalVector(dm, &X); CHKERRQ(ierr); 220 ierr = VecGetLocalSize(X, &lsize); CHKERRQ(ierr); 221 ierr = VecGetSize(X, &gsize); CHKERRQ(ierr); 222 ierr = DMCreateLocalVector(dm, &Xloc); CHKERRQ(ierr); 223 ierr = VecGetSize(Xloc, &xlsize); CHKERRQ(ierr); 224 ierr = VecDuplicate(X, &rhs); CHKERRQ(ierr); 225 226 // Operator 227 ierr = PetscMalloc1(1, &userO); CHKERRQ(ierr); 228 ierr = MatCreateShell(comm, lsize, lsize, gsize, gsize, 229 userO, &matO); CHKERRQ(ierr); 230 ierr = MatShellSetOperation(matO, MATOP_MULT, 231 (void(*)(void))MatMult_Ceed); CHKERRQ(ierr); 232 ierr = MatShellSetOperation(matO, MATOP_GET_DIAGONAL, 233 (void(*)(void))MatGetDiag); CHKERRQ(ierr); 234 if (memtyperequested == CEED_MEM_DEVICE) { 235 ierr = MatShellSetVecType(matO, VECCUDA); CHKERRQ(ierr); 236 } 237 238 // Print summary 239 if (!test_mode) { 240 PetscInt P = degree + 1, Q = P + qextra; 241 242 const char *usedresource; 243 CeedGetResource(ceed, &usedresource); 244 245 VecType vectype; 246 ierr = VecGetType(X, &vectype); CHKERRQ(ierr); 247 248 ierr = PetscPrintf(comm, 249 "\n-- CEED Benchmark Problem %d -- libCEED + PETSc --\n" 250 " PETSc:\n" 251 " PETSc Vec Type : %s\n" 252 " libCEED:\n" 253 " libCEED Backend : %s\n" 254 " libCEED Backend MemType : %s\n" 255 " libCEED User Requested MemType : %s\n" 256 " Mesh:\n" 257 " Number of 1D Basis Nodes (p) : %d\n" 258 " Number of 1D Quadrature Points (q) : %d\n" 259 " Global nodes : %D\n" 260 " Owned nodes : %D\n" 261 " DoF per node : %D\n" 262 " Owned elements : %D\n", 263 bpchoice+1, vectype, usedresource, 264 CeedMemTypes[memtypebackend], 265 (setmemtyperequest) ? 
266 CeedMemTypes[memtyperequested] : "none", 267 P, Q, gsize/ncompu, lsize/ncompu, ncompu, lelem); 268 CHKERRQ(ierr); 269 } 270 271 // Create RHS vector 272 ierr = VecDuplicate(Xloc, &rhsloc); CHKERRQ(ierr); 273 ierr = VecZeroEntries(rhsloc); CHKERRQ(ierr); 274 if (memtyperequested == CEED_MEM_HOST) { 275 ierr = VecGetArray(rhsloc, &r); CHKERRQ(ierr); 276 } else { 277 ierr = VecCUDAGetArray(rhsloc, &r); CHKERRQ(ierr); 278 } 279 CeedVectorCreate(ceed, xlsize, &rhsceed); 280 CeedVectorSetArray(rhsceed, memtyperequested, CEED_USE_POINTER, r); 281 282 ierr = PetscMalloc1(1, &ceeddata); CHKERRQ(ierr); 283 ierr = SetupLibceedByDegree(dm, ceed, degree, dim, qextra, 284 ncompu, gsize, xlsize, bpchoice, ceeddata, 285 true, rhsceed, &target); CHKERRQ(ierr); 286 287 // Gather RHS 288 CeedVectorSyncArray(rhsceed, memtyperequested); 289 if (memtyperequested == CEED_MEM_HOST) { 290 ierr = VecRestoreArray(rhsloc, &r); CHKERRQ(ierr); 291 } else { 292 ierr = VecCUDARestoreArray(rhsloc, &r); CHKERRQ(ierr); 293 } 294 ierr = VecZeroEntries(rhs); CHKERRQ(ierr); 295 ierr = DMLocalToGlobalBegin(dm, rhsloc, ADD_VALUES, rhs); CHKERRQ(ierr); 296 ierr = DMLocalToGlobalEnd(dm, rhsloc, ADD_VALUES, rhs); CHKERRQ(ierr); 297 CeedVectorDestroy(&rhsceed); 298 299 // Create the error Q-function 300 CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpchoice].error, 301 bpOptions[bpchoice].errorfname, &qferror); 302 CeedQFunctionAddInput(qferror, "u", ncompu, CEED_EVAL_INTERP); 303 CeedQFunctionAddInput(qferror, "true_soln", ncompu, CEED_EVAL_NONE); 304 CeedQFunctionAddOutput(qferror, "error", ncompu, CEED_EVAL_NONE); 305 306 // Create the error operator 307 CeedOperatorCreate(ceed, qferror, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, 308 &operror); 309 CeedOperatorSetField(operror, "u", ceeddata->Erestrictu, 310 ceeddata->basisu, CEED_VECTOR_ACTIVE); 311 CeedOperatorSetField(operror, "true_soln", ceeddata->Erestrictui, 312 CEED_BASIS_COLLOCATED, target); 313 CeedOperatorSetField(operror, "error", 
ceeddata->Erestrictui, 314 CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE); 315 316 // Set up Mat 317 userO->comm = comm; 318 userO->dm = dm; 319 userO->Xloc = Xloc; 320 ierr = VecDuplicate(Xloc, &userO->Yloc); CHKERRQ(ierr); 321 userO->xceed = ceeddata->xceed; 322 userO->yceed = ceeddata->yceed; 323 userO->op = ceeddata->opapply; 324 userO->ceed = ceed; 325 userO->memtype = memtyperequested; 326 if (memtyperequested == CEED_MEM_HOST) { 327 userO->VecGetArray = VecGetArray; 328 userO->VecGetArrayRead = VecGetArrayRead; 329 userO->VecRestoreArray = VecRestoreArray; 330 userO->VecRestoreArrayRead = VecRestoreArrayRead; 331 } else { 332 userO->VecGetArray = VecCUDAGetArray; 333 userO->VecGetArrayRead = VecCUDAGetArrayRead; 334 userO->VecRestoreArray = VecCUDARestoreArray; 335 userO->VecRestoreArrayRead = VecCUDARestoreArrayRead; 336 } 337 338 ierr = KSPCreate(comm, &ksp); CHKERRQ(ierr); 339 { 340 PC pc; 341 ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr); 342 if (bpchoice == CEED_BP1 || bpchoice == CEED_BP2) { 343 ierr = PCSetType(pc, PCJACOBI); CHKERRQ(ierr); 344 ierr = PCJacobiSetType(pc, PC_JACOBI_ROWSUM); CHKERRQ(ierr); 345 } else { 346 ierr = PCSetType(pc, PCNONE); CHKERRQ(ierr); 347 } 348 ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr); 349 ierr = KSPSetNormType(ksp, KSP_NORM_NATURAL); CHKERRQ(ierr); 350 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 351 PETSC_DEFAULT); CHKERRQ(ierr); 352 } 353 ierr = KSPSetOperators(ksp, matO, matO); CHKERRQ(ierr); 354 355 // First run, if benchmarking 356 if (benchmark_mode) { 357 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1); 358 CHKERRQ(ierr); 359 my_rt_start = MPI_Wtime(); 360 ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr); 361 my_rt = MPI_Wtime() - my_rt_start; 362 ierr = MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm); 363 CHKERRQ(ierr); 364 // Set maxits based on first iteration timing 365 if (my_rt > 0.02) { 366 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 
5); 367 CHKERRQ(ierr); 368 } else { 369 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20); 370 CHKERRQ(ierr); 371 } 372 } 373 ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr); 374 375 // Timed solve 376 ierr = VecZeroEntries(X); CHKERRQ(ierr); 377 ierr = PetscBarrier((PetscObject)ksp); CHKERRQ(ierr); 378 379 // -- Performance logging 380 ierr = PetscLogStageRegister("Solve Stage", &solvestage); CHKERRQ(ierr); 381 ierr = PetscLogStagePush(solvestage); CHKERRQ(ierr); 382 383 // -- Solve 384 my_rt_start = MPI_Wtime(); 385 ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr); 386 my_rt = MPI_Wtime() - my_rt_start; 387 388 // -- Performance logging 389 ierr = PetscLogStagePop(); 390 391 // Output results 392 { 393 KSPType ksptype; 394 KSPConvergedReason reason; 395 PetscReal rnorm; 396 PetscInt its; 397 ierr = KSPGetType(ksp, &ksptype); CHKERRQ(ierr); 398 ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr); 399 ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr); 400 ierr = KSPGetResidualNorm(ksp, &rnorm); CHKERRQ(ierr); 401 if (!test_mode || reason < 0 || rnorm > 1e-8) { 402 ierr = PetscPrintf(comm, 403 " KSP:\n" 404 " KSP Type : %s\n" 405 " KSP Convergence : %s\n" 406 " Total KSP Iterations : %D\n" 407 " Final rnorm : %e\n", 408 ksptype, KSPConvergedReasons[reason], its, 409 (double)rnorm); CHKERRQ(ierr); 410 } 411 if (!test_mode) { 412 ierr = PetscPrintf(comm," Performance:\n"); CHKERRQ(ierr); 413 } 414 { 415 PetscReal maxerror; 416 ierr = ComputeErrorMax(userO, operror, X, target, &maxerror); 417 CHKERRQ(ierr); 418 PetscReal tol = 5e-2; 419 if (!test_mode || maxerror > tol) { 420 ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm); 421 CHKERRQ(ierr); 422 ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm); 423 CHKERRQ(ierr); 424 ierr = PetscPrintf(comm, 425 " Pointwise Error (max) : %e\n" 426 " CG Solve Time : %g (%g) sec\n", 427 (double)maxerror, rt_max, rt_min); CHKERRQ(ierr); 428 } 429 } 430 if (benchmark_mode && 
(!test_mode)) { 431 ierr = PetscPrintf(comm, 432 " DoFs/Sec in CG : %g (%g) million\n", 433 1e-6*gsize*its/rt_max, 434 1e-6*gsize*its/rt_min); CHKERRQ(ierr); 435 } 436 } 437 438 if (write_solution) { 439 PetscViewer vtkviewersoln; 440 441 ierr = PetscViewerCreate(comm, &vtkviewersoln); CHKERRQ(ierr); 442 ierr = PetscViewerSetType(vtkviewersoln, PETSCVIEWERVTK); CHKERRQ(ierr); 443 ierr = PetscViewerFileSetName(vtkviewersoln, "solution.vtk"); CHKERRQ(ierr); 444 ierr = VecView(X, vtkviewersoln); CHKERRQ(ierr); 445 ierr = PetscViewerDestroy(&vtkviewersoln); CHKERRQ(ierr); 446 } 447 448 // Cleanup 449 ierr = VecDestroy(&X); CHKERRQ(ierr); 450 ierr = VecDestroy(&Xloc); CHKERRQ(ierr); 451 ierr = VecDestroy(&userO->Yloc); CHKERRQ(ierr); 452 ierr = MatDestroy(&matO); CHKERRQ(ierr); 453 ierr = PetscFree(userO); CHKERRQ(ierr); 454 ierr = CeedDataDestroy(0, ceeddata); CHKERRQ(ierr); 455 ierr = DMDestroy(&dm); CHKERRQ(ierr); 456 457 ierr = VecDestroy(&rhs); CHKERRQ(ierr); 458 ierr = VecDestroy(&rhsloc); CHKERRQ(ierr); 459 ierr = KSPDestroy(&ksp); CHKERRQ(ierr); 460 CeedVectorDestroy(&target); 461 CeedQFunctionDestroy(&qferror); 462 CeedOperatorDestroy(&operror); 463 CeedDestroy(&ceed); 464 return PetscFinalize(); 465 } 466