/*
  DMPatch, for domains covered by sets of patches.
*/
#ifndef PETSCDMPATCH_H
#define PETSCDMPATCH_H

#include <petscdm.h>

/* SUBMANSEC = DMPatch */

/*S
  DMPATCH - `DM` object that encapsulates a domain divided into many patches

  Level: intermediate

.seealso: `DM`, `DMPatchCreate()`, `DMPatchSolve()`, `DMPatchZoom()`, `DMPatchGetPatchSize()`, `DMPatchSetPatchSize()`,
          `DMPatchGetCommSize()`, `DMPatchSetCommSize()`, `DMPatchGetCoarse()`, `DMPatchCreateGrid()`
S*/

/* Constructor: create an empty DMPATCH object on the given communicator */
PETSC_EXTERN PetscErrorCode DMPatchCreate(MPI_Comm, DM *);

/* Extraction of a sub-DM plus the PetscSFs relating it to the original DM */
PETSC_EXTERN PetscErrorCode DMPatchZoom(DM, MatStencil, MatStencil, MPI_Comm, DM *, PetscSF *, PetscSF *);
PETSC_EXTERN PetscErrorCode DMPatchSolve(DM);
/* Patch extent is expressed as a MatStencil (per-dimension sizes); get/set pairs below */
PETSC_EXTERN PetscErrorCode DMPatchGetPatchSize(DM, MatStencil *);
PETSC_EXTERN PetscErrorCode DMPatchSetPatchSize(DM, MatStencil);
PETSC_EXTERN PetscErrorCode DMPatchGetCommSize(DM, MatStencil *);
PETSC_EXTERN PetscErrorCode DMPatchSetCommSize(DM, MatStencil);
/* Access the stored coarse-level DM (see design notes below) */
PETSC_EXTERN PetscErrorCode DMPatchGetCoarse(DM, DM *);
PETSC_EXTERN PetscErrorCode DMPatchCreateGrid(MPI_Comm, PetscInt, MatStencil, MatStencil, MatStencil, DM *);

/*
  Design notes:

  * We want each patch to consist of an entire DM, DMDA at first
    - We cannot afford to store much more than the data from a single patch in memory
    - No global PetscSection, only PetscLayout
    - Optional scatters
  * There is a storable coarse level, which will also be a traditional DM (DMDA here)
  * The local and global vectors correspond to a ghosted patch
  * Need a way to activate a patch
  * Jack in sizes for l/g vectors
    - Need routine for viewing a full global vector
    - Jed handles solver
*/

#endif