/* xref: /petsc/include/petscdmpatch.h (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03) */
/*
  DMPatch, for domains covered by sets of patches.
*/
4 #if !defined(__PETSCDMPATCH_H)
5 #define __PETSCDMPATCH_H
6 #include <petscdm.h>
7 
/*S
  DMPATCH - DM object that encapsulates a domain divided into many patches

  Level: intermediate

  Concepts: grids, grid refinement

.seealso:  DM, DMPatchCreate()
S*/
/* Create an empty DMPATCH object on the given communicator; configure it
   with the DMPatchSet*() calls below before use. */
PETSC_EXTERN PetscErrorCode DMPatchCreate(MPI_Comm, DM*);

/* Extract ("zoom" into) the subdomain bounded by the two MatStencil corners
   from the global DM/Vec onto the given subcommunicator, returning the patch
   DM and two PetscSFs.  NOTE(review): the SFs presumably map patch points to
   global points (one for the interior, one including ghosts) - confirm
   against the implementation. */
PETSC_EXTERN PetscErrorCode DMPatchZoom(DM,Vec,MatStencil,MatStencil,MPI_Comm,DM*,PetscSF*,PetscSF*);
/* Run the patch-by-patch solve over the whole domain (see the design notes
   at the bottom of this header). */
PETSC_EXTERN PetscErrorCode DMPatchSolve(DM);
/* Get/Set the extent of a single patch, expressed as a MatStencil
   (i,j,k,c box sizes). */
PETSC_EXTERN PetscErrorCode DMPatchGetPatchSize(DM,MatStencil*);
PETSC_EXTERN PetscErrorCode DMPatchSetPatchSize(DM,MatStencil);
/* Get/Set the process-grid size used per patch.  NOTE(review): semantics
   inferred from the name - verify against DMPatchZoom usage. */
PETSC_EXTERN PetscErrorCode DMPatchGetCommSize(DM,MatStencil*);
PETSC_EXTERN PetscErrorCode DMPatchSetCommSize(DM,MatStencil);
/* Retrieve the stored coarse-level DM (a traditional DM, e.g. DMDA). */
PETSC_EXTERN PetscErrorCode DMPatchGetCoarse(DM,DM*);
/* Create a patch-decomposed grid: communicator, dimension, then three
   MatStencil extents.  NOTE(review): presumably patch size, commSize, and
   global size, in that order - confirm with the implementation. */
PETSC_EXTERN PetscErrorCode DMPatchCreateGrid(MPI_Comm,PetscInt,MatStencil,MatStencil,MatStencil,DM*);

/*
 * We want each patch to consist of an entire DM, a DMDA at first
 - We cannot afford to store much more than the data from a single patch in memory
   - No global PetscSection, only PetscLayout
   - Optional scatters
   * There is a storable coarse level, which will also be a traditional DM (a DMDA here)
   * The local and global vectors correspond to a ghosted patch
 * Need a way to activate a patch
   * Jack in sizes for local/global vectors
 - Need a routine for viewing a full global vector
 - Jed handles the solver
*/

#endif