// Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
// the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
// reserved. See files LICENSE and NOTICE for details.
//
// This file is part of CEED, a collection of benchmarks, miniapps, software
// libraries and APIs for efficient high-order finite element and spectral
// element discretizations for exascale applications. For more information and
// source code availability see http://github.com/ceed.
//
// The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
// a collaborative effort of two U.S. Department of Energy organizations (Office
// of Science and the National Nuclear Security Administration) responsible for
// the planning and preparation of a capable exascale ecosystem, including
// software, applications, hardware, advanced system engineering and early
// testbed platforms, in support of the nation's exascale computing imperative.

// libCEED + PETSc Example: CEED BPs
//
// This example demonstrates a simple usage of libCEED with PETSc to solve the
// CEED BP benchmark problems, see http://ceed.exascaleproject.org/bps.
//
// The code uses higher level communication protocols in DMPlex.
//
// Build with:
//
//     make bps [PETSC_DIR=</path/to/petsc>] [CEED_DIR=</path/to/libceed>]
//
// Sample runs:
//
//     ./bps -problem bp1 -degree 3
//     ./bps -problem bp2 -ceed /cpu/self -degree 3
//     ./bps -problem bp3 -ceed /gpu/occa -degree 3
//     ./bps -problem bp4 -ceed /cpu/occa -degree 3
//     ./bps -problem bp5 -ceed /omp/occa -degree 3
//     ./bps -problem bp6 -ceed /ocl/occa -degree 3
//
//TESTARGS -ceed {ceed_resource} -test -problem bp5 -degree 3

/// @file
/// CEED BPs example using PETSc with DMPlex
/// See bpsraw.c for a "raw" implementation using a structured grid.
42 const char help[] = "Solve CEED BPs using PETSc with DMPlex\n"; 43 44 #include <stdbool.h> 45 #include <string.h> 46 #include <petscksp.h> 47 #include <petscdmplex.h> 48 #include <ceed.h> 49 #include "setup.h" 50 51 int main(int argc, char **argv) { 52 PetscInt ierr; 53 MPI_Comm comm; 54 char filename[PETSC_MAX_PATH_LEN], 55 ceedresource[PETSC_MAX_PATH_LEN] = "/cpu/self"; 56 double my_rt_start, my_rt, rt_min, rt_max; 57 PetscInt degree = 3, qextra, lsize, gsize, dim = 3, melem[3] = {3, 3, 3}, 58 ncompu = 1, xlsize, localnodes, lelem; 59 PetscScalar *r; 60 PetscBool test_mode, benchmark_mode, read_mesh, write_solution, 61 userlnodes = PETSC_FALSE; 62 PetscLogStage solvestage; 63 Vec X, Xloc, rhs, rhsloc; 64 Mat matO; 65 KSP ksp; 66 DM dm; 67 UserO userO; 68 Ceed ceed; 69 CeedData ceeddata; 70 CeedQFunction qferror; 71 CeedOperator operror; 72 CeedVector rhsceed, target; 73 CeedMemType memtyperequested; 74 bpType bpchoice; 75 76 // Check PETSc CUDA support 77 PetscBool petschavecuda, setmemtyperequest = PETSC_FALSE; 78 // *INDENT-OFF* 79 #ifdef PETSC_HAVE_CUDA 80 petschavecuda = PETSC_TRUE; 81 #else 82 petschavecuda = PETSC_FALSE; 83 #endif 84 // *INDENT-ON* 85 86 ierr = PetscInitialize(&argc, &argv, NULL, help); 87 if (ierr) return ierr; 88 comm = PETSC_COMM_WORLD; 89 90 // Read command line options 91 ierr = PetscOptionsBegin(comm, NULL, "CEED BPs in PETSc", NULL); CHKERRQ(ierr); 92 bpchoice = CEED_BP1; 93 ierr = PetscOptionsEnum("-problem", 94 "CEED benchmark problem to solve", NULL, 95 bpTypes, (PetscEnum)bpchoice, (PetscEnum *)&bpchoice, 96 NULL); CHKERRQ(ierr); 97 ncompu = bpOptions[bpchoice].ncompu; 98 test_mode = PETSC_FALSE; 99 ierr = PetscOptionsBool("-test", 100 "Testing mode (do not print unless error is large)", 101 NULL, test_mode, &test_mode, NULL); CHKERRQ(ierr); 102 benchmark_mode = PETSC_FALSE; 103 ierr = PetscOptionsBool("-benchmark", 104 "Benchmarking mode (prints benchmark statistics)", 105 NULL, benchmark_mode, &benchmark_mode, NULL); 106 
CHKERRQ(ierr); 107 write_solution = PETSC_FALSE; 108 ierr = PetscOptionsBool("-write_solution", 109 "Write solution for visualization", 110 NULL, write_solution, &write_solution, NULL); 111 CHKERRQ(ierr); 112 degree = test_mode ? 3 : 2; 113 ierr = PetscOptionsInt("-degree", "Polynomial degree of tensor product basis", 114 NULL, degree, °ree, NULL); CHKERRQ(ierr); 115 qextra = bpOptions[bpchoice].qextra; 116 ierr = PetscOptionsInt("-qextra", "Number of extra quadrature points", 117 NULL, qextra, &qextra, NULL); CHKERRQ(ierr); 118 ierr = PetscOptionsString("-ceed", "CEED resource specifier", 119 NULL, ceedresource, ceedresource, 120 sizeof(ceedresource), NULL); CHKERRQ(ierr); 121 read_mesh = PETSC_FALSE; 122 ierr = PetscOptionsString("-mesh", "Read mesh from file", NULL, 123 filename, filename, sizeof(filename), &read_mesh); 124 CHKERRQ(ierr); 125 if (!read_mesh) { 126 PetscInt tmp = dim; 127 ierr = PetscOptionsIntArray("-cells", "Number of cells per dimension", NULL, 128 melem, &tmp, NULL); CHKERRQ(ierr); 129 } 130 memtyperequested = petschavecuda ? 
CEED_MEM_DEVICE : CEED_MEM_HOST; 131 ierr = PetscOptionsEnum("-memtype", 132 "CEED MemType requested", NULL, 133 memTypes, (PetscEnum)memtyperequested, 134 (PetscEnum *)&memtyperequested, &setmemtyperequest); 135 CHKERRQ(ierr); 136 localnodes = 1000; 137 ierr = PetscOptionsInt("-local_nodes", 138 "Target number of locally owned nodes per process", 139 NULL, localnodes, &localnodes, &userlnodes); 140 CHKERRQ(ierr); 141 142 ierr = PetscOptionsEnd(); CHKERRQ(ierr); 143 144 // Setup DM 145 if (read_mesh) { 146 ierr = DMPlexCreateFromFile(PETSC_COMM_WORLD, filename, PETSC_TRUE, &dm); 147 CHKERRQ(ierr); 148 } else { 149 if (userlnodes) { 150 // Find a nicely composite number of elements no less than lnodes 151 for (lelem = PetscMax(1, localnodes / (degree*degree*degree)); ; 152 lelem++) { 153 Split3(lelem, melem, true); 154 if (Max3(melem) / Min3(melem) <= 2) break; 155 } 156 } else { 157 lelem = melem[0]*melem[1]*melem[2]; 158 } 159 ierr = DMPlexCreateBoxMesh(PETSC_COMM_WORLD, dim, PETSC_FALSE, melem, NULL, 160 NULL, NULL, PETSC_TRUE, &dm); CHKERRQ(ierr); 161 } 162 163 { 164 DM dmDist = NULL; 165 PetscPartitioner part; 166 167 ierr = DMPlexGetPartitioner(dm, &part); CHKERRQ(ierr); 168 ierr = PetscPartitionerSetFromOptions(part); CHKERRQ(ierr); 169 ierr = DMPlexDistribute(dm, 0, NULL, &dmDist); CHKERRQ(ierr); 170 if (dmDist) { 171 ierr = DMDestroy(&dm); CHKERRQ(ierr); 172 dm = dmDist; 173 } 174 } 175 176 // Set up libCEED 177 CeedInit(ceedresource, &ceed); 178 CeedMemType memtypebackend; 179 CeedGetPreferredMemType(ceed, &memtypebackend); 180 181 // Check memtype compatibility 182 if (!setmemtyperequest) 183 memtyperequested = memtypebackend; 184 else if (!petschavecuda && memtyperequested == CEED_MEM_DEVICE) 185 SETERRQ1(PETSC_COMM_WORLD, PETSC_ERR_SUP_SYS, 186 "PETSc was not built with CUDA. 
" 187 "Requested MemType CEED_MEM_DEVICE is not supported.", NULL); 188 189 // Create DM 190 ierr = SetupDMByDegree(dm, degree, ncompu, bpchoice); 191 CHKERRQ(ierr); 192 193 // Create vectors 194 if (memtyperequested == CEED_MEM_DEVICE) { 195 ierr = DMSetVecType(dm, VECCUDA); CHKERRQ(ierr); 196 } 197 ierr = DMCreateGlobalVector(dm, &X); CHKERRQ(ierr); 198 ierr = VecGetLocalSize(X, &lsize); CHKERRQ(ierr); 199 ierr = VecGetSize(X, &gsize); CHKERRQ(ierr); 200 ierr = DMCreateLocalVector(dm, &Xloc); CHKERRQ(ierr); 201 ierr = VecGetSize(Xloc, &xlsize); CHKERRQ(ierr); 202 ierr = VecDuplicate(X, &rhs); CHKERRQ(ierr); 203 204 // Operator 205 ierr = PetscMalloc1(1, &userO); CHKERRQ(ierr); 206 ierr = MatCreateShell(comm, lsize, lsize, gsize, gsize, 207 userO, &matO); CHKERRQ(ierr); 208 ierr = MatShellSetOperation(matO, MATOP_MULT, 209 (void(*)(void))MatMult_Ceed); CHKERRQ(ierr); 210 ierr = MatShellSetOperation(matO, MATOP_GET_DIAGONAL, 211 (void(*)(void))MatGetDiag); CHKERRQ(ierr); 212 if (memtyperequested == CEED_MEM_DEVICE) { 213 ierr = MatShellSetVecType(matO, VECCUDA); CHKERRQ(ierr); 214 } 215 216 // Print summary 217 if (!test_mode) { 218 PetscInt P = degree + 1, Q = P + qextra; 219 220 const char *usedresource; 221 CeedGetResource(ceed, &usedresource); 222 223 VecType vectype; 224 ierr = VecGetType(X, &vectype); CHKERRQ(ierr); 225 226 ierr = PetscPrintf(comm, 227 "\n-- CEED Benchmark Problem %d -- libCEED + PETSc --\n" 228 " PETSc:\n" 229 " PETSc Vec Type : %s\n" 230 " libCEED:\n" 231 " libCEED Backend : %s\n" 232 " libCEED Backend MemType : %s\n" 233 " libCEED User Requested MemType : %s\n" 234 " Mesh:\n" 235 " Number of 1D Basis Nodes (p) : %d\n" 236 " Number of 1D Quadrature Points (q) : %d\n" 237 " Global nodes : %D\n" 238 " Owned nodes : %D\n" 239 " DoF per node : %D\n" 240 " Owned elements : %D\n", 241 bpchoice+1, vectype, usedresource, 242 CeedMemTypes[memtypebackend], 243 (setmemtyperequest) ? 
244 CeedMemTypes[memtyperequested] : "none", 245 P, Q, gsize/ncompu, lsize/ncompu, ncompu, lelem); 246 CHKERRQ(ierr); 247 } 248 249 // Create RHS vector 250 ierr = VecDuplicate(Xloc, &rhsloc); CHKERRQ(ierr); 251 ierr = VecZeroEntries(rhsloc); CHKERRQ(ierr); 252 if (memtyperequested == CEED_MEM_HOST) { 253 ierr = VecGetArray(rhsloc, &r); CHKERRQ(ierr); 254 } else { 255 ierr = VecCUDAGetArray(rhsloc, &r); CHKERRQ(ierr); 256 } 257 CeedVectorCreate(ceed, xlsize, &rhsceed); 258 CeedVectorSetArray(rhsceed, memtyperequested, CEED_USE_POINTER, r); 259 260 ierr = PetscMalloc1(1, &ceeddata); CHKERRQ(ierr); 261 ierr = SetupLibceedByDegree(dm, ceed, degree, dim, qextra, 262 ncompu, gsize, xlsize, bpchoice, ceeddata, 263 true, rhsceed, &target); CHKERRQ(ierr); 264 265 // Gather RHS 266 CeedVectorSyncArray(rhsceed, memtyperequested); 267 if (memtyperequested == CEED_MEM_HOST) { 268 ierr = VecRestoreArray(rhsloc, &r); CHKERRQ(ierr); 269 } else { 270 ierr = VecCUDARestoreArray(rhsloc, &r); CHKERRQ(ierr); 271 } 272 ierr = VecZeroEntries(rhs); CHKERRQ(ierr); 273 ierr = DMLocalToGlobalBegin(dm, rhsloc, ADD_VALUES, rhs); CHKERRQ(ierr); 274 ierr = DMLocalToGlobalEnd(dm, rhsloc, ADD_VALUES, rhs); CHKERRQ(ierr); 275 CeedVectorDestroy(&rhsceed); 276 277 // Create the error Q-function 278 CeedQFunctionCreateInterior(ceed, 1, bpOptions[bpchoice].error, 279 bpOptions[bpchoice].errorfname, &qferror); 280 CeedQFunctionAddInput(qferror, "u", ncompu, CEED_EVAL_INTERP); 281 CeedQFunctionAddInput(qferror, "true_soln", ncompu, CEED_EVAL_NONE); 282 CeedQFunctionAddOutput(qferror, "error", ncompu, CEED_EVAL_NONE); 283 284 // Create the error operator 285 CeedOperatorCreate(ceed, qferror, CEED_QFUNCTION_NONE, CEED_QFUNCTION_NONE, 286 &operror); 287 CeedOperatorSetField(operror, "u", ceeddata->Erestrictu, 288 ceeddata->basisu, CEED_VECTOR_ACTIVE); 289 CeedOperatorSetField(operror, "true_soln", ceeddata->Erestrictui, 290 CEED_BASIS_COLLOCATED, target); 291 CeedOperatorSetField(operror, "error", 
ceeddata->Erestrictui, 292 CEED_BASIS_COLLOCATED, CEED_VECTOR_ACTIVE); 293 294 // Set up Mat 295 userO->comm = comm; 296 userO->dm = dm; 297 userO->Xloc = Xloc; 298 ierr = VecDuplicate(Xloc, &userO->Yloc); CHKERRQ(ierr); 299 userO->xceed = ceeddata->xceed; 300 userO->yceed = ceeddata->yceed; 301 userO->op = ceeddata->opapply; 302 userO->ceed = ceed; 303 userO->memtype = memtyperequested; 304 if (memtyperequested == CEED_MEM_HOST) { 305 userO->VecGetArray = VecGetArray; 306 userO->VecGetArrayRead = VecGetArrayRead; 307 userO->VecRestoreArray = VecRestoreArray; 308 userO->VecRestoreArrayRead = VecRestoreArrayRead; 309 } else { 310 userO->VecGetArray = VecCUDAGetArray; 311 userO->VecGetArrayRead = VecCUDAGetArrayRead; 312 userO->VecRestoreArray = VecCUDARestoreArray; 313 userO->VecRestoreArrayRead = VecCUDARestoreArrayRead; 314 } 315 316 ierr = KSPCreate(comm, &ksp); CHKERRQ(ierr); 317 { 318 PC pc; 319 ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr); 320 if (bpchoice == CEED_BP1 || bpchoice == CEED_BP2) { 321 ierr = PCSetType(pc, PCJACOBI); CHKERRQ(ierr); 322 ierr = PCJacobiSetType(pc, PC_JACOBI_ROWSUM); CHKERRQ(ierr); 323 } else { 324 ierr = PCSetType(pc, PCNONE); CHKERRQ(ierr); 325 } 326 ierr = KSPSetType(ksp, KSPCG); CHKERRQ(ierr); 327 ierr = KSPSetNormType(ksp, KSP_NORM_NATURAL); CHKERRQ(ierr); 328 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 329 PETSC_DEFAULT); CHKERRQ(ierr); 330 } 331 ierr = KSPSetOperators(ksp, matO, matO); CHKERRQ(ierr); 332 333 // First run, if benchmarking 334 if (benchmark_mode) { 335 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 1); 336 CHKERRQ(ierr); 337 my_rt_start = MPI_Wtime(); 338 ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr); 339 my_rt = MPI_Wtime() - my_rt_start; 340 ierr = MPI_Allreduce(MPI_IN_PLACE, &my_rt, 1, MPI_DOUBLE, MPI_MIN, comm); 341 CHKERRQ(ierr); 342 // Set maxits based on first iteration timing 343 if (my_rt > 0.02) { 344 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 
5); 345 CHKERRQ(ierr); 346 } else { 347 ierr = KSPSetTolerances(ksp, 1e-10, PETSC_DEFAULT, PETSC_DEFAULT, 20); 348 CHKERRQ(ierr); 349 } 350 } 351 ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr); 352 353 // Timed solve 354 ierr = VecZeroEntries(X); CHKERRQ(ierr); 355 ierr = PetscBarrier((PetscObject)ksp); CHKERRQ(ierr); 356 357 // -- Performance logging 358 ierr = PetscLogStageRegister("Solve Stage", &solvestage); CHKERRQ(ierr); 359 ierr = PetscLogStagePush(solvestage); CHKERRQ(ierr); 360 361 // -- Solve 362 my_rt_start = MPI_Wtime(); 363 ierr = KSPSolve(ksp, rhs, X); CHKERRQ(ierr); 364 my_rt = MPI_Wtime() - my_rt_start; 365 366 // -- Performance logging 367 ierr = PetscLogStagePop(); 368 369 // Output results 370 { 371 KSPType ksptype; 372 KSPConvergedReason reason; 373 PetscReal rnorm; 374 PetscInt its; 375 ierr = KSPGetType(ksp, &ksptype); CHKERRQ(ierr); 376 ierr = KSPGetConvergedReason(ksp, &reason); CHKERRQ(ierr); 377 ierr = KSPGetIterationNumber(ksp, &its); CHKERRQ(ierr); 378 ierr = KSPGetResidualNorm(ksp, &rnorm); CHKERRQ(ierr); 379 if (!test_mode || reason < 0 || rnorm > 1e-8) { 380 ierr = PetscPrintf(comm, 381 " KSP:\n" 382 " KSP Type : %s\n" 383 " KSP Convergence : %s\n" 384 " Total KSP Iterations : %D\n" 385 " Final rnorm : %e\n", 386 ksptype, KSPConvergedReasons[reason], its, 387 (double)rnorm); CHKERRQ(ierr); 388 } 389 if (!test_mode) { 390 ierr = PetscPrintf(comm," Performance:\n"); CHKERRQ(ierr); 391 } 392 { 393 PetscReal maxerror; 394 ierr = ComputeErrorMax(userO, operror, X, target, &maxerror); 395 CHKERRQ(ierr); 396 PetscReal tol = 5e-2; 397 if (!test_mode || maxerror > tol) { 398 ierr = MPI_Allreduce(&my_rt, &rt_min, 1, MPI_DOUBLE, MPI_MIN, comm); 399 CHKERRQ(ierr); 400 ierr = MPI_Allreduce(&my_rt, &rt_max, 1, MPI_DOUBLE, MPI_MAX, comm); 401 CHKERRQ(ierr); 402 ierr = PetscPrintf(comm, 403 " Pointwise Error (max) : %e\n" 404 " CG Solve Time : %g (%g) sec\n", 405 (double)maxerror, rt_max, rt_min); CHKERRQ(ierr); 406 } 407 } 408 if (benchmark_mode && 
(!test_mode)) { 409 ierr = PetscPrintf(comm, 410 " DoFs/Sec in CG : %g (%g) million\n", 411 1e-6*gsize*its/rt_max, 412 1e-6*gsize*its/rt_min); CHKERRQ(ierr); 413 } 414 } 415 416 if (write_solution) { 417 PetscViewer vtkviewersoln; 418 419 ierr = PetscViewerCreate(comm, &vtkviewersoln); CHKERRQ(ierr); 420 ierr = PetscViewerSetType(vtkviewersoln, PETSCVIEWERVTK); CHKERRQ(ierr); 421 ierr = PetscViewerFileSetName(vtkviewersoln, "solution.vtk"); CHKERRQ(ierr); 422 ierr = VecView(X, vtkviewersoln); CHKERRQ(ierr); 423 ierr = PetscViewerDestroy(&vtkviewersoln); CHKERRQ(ierr); 424 } 425 426 // Cleanup 427 ierr = VecDestroy(&X); CHKERRQ(ierr); 428 ierr = VecDestroy(&Xloc); CHKERRQ(ierr); 429 ierr = VecDestroy(&userO->Yloc); CHKERRQ(ierr); 430 ierr = MatDestroy(&matO); CHKERRQ(ierr); 431 ierr = PetscFree(userO); CHKERRQ(ierr); 432 ierr = CeedDataDestroy(0, ceeddata); CHKERRQ(ierr); 433 ierr = DMDestroy(&dm); CHKERRQ(ierr); 434 435 ierr = VecDestroy(&rhs); CHKERRQ(ierr); 436 ierr = VecDestroy(&rhsloc); CHKERRQ(ierr); 437 ierr = KSPDestroy(&ksp); CHKERRQ(ierr); 438 CeedVectorDestroy(&target); 439 CeedQFunctionDestroy(&qferror); 440 CeedOperatorDestroy(&operror); 441 CeedDestroy(&ceed); 442 return PetscFinalize(); 443 } 444