xref: /petsc/src/dm/impls/plex/tests/ex29.c (revision 40badf4fbc550ac1f60bd080eaff6de6d55b946d)
/* Usage string printed by -help; describes this example's purpose. */
static char help[] = "Test scalable partitioning on distributed meshes\n\n";
2 
3 #include <petscdmplex.h>
4 
/* Indices into AppCtx.stages for the four timed phases of mesh setup. */
enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

/* Application context: logging handles plus the mesh-partitioning options. */
typedef struct {
  PetscLogEvent createMeshEvent; /* Event timing the whole CreateMesh() call */
  PetscLogStage stages[4];       /* One log stage per phase, indexed by the enum above */
  /* Domain and mesh definition */
  PetscInt overlap; /* The cell overlap to use during partitioning */
} AppCtx;
13 
14 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
15 {
16   PetscErrorCode ierr;
17 
18   PetscFunctionBegin;
19   options->overlap = PETSC_FALSE;
20 
21   ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
22   CHKERRQ(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex29.c", options->overlap, &options->overlap, NULL,0));
23   ierr = PetscOptionsEnd();CHKERRQ(ierr);
24 
25   CHKERRQ(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
26   CHKERRQ(PetscLogStageRegister("MeshLoad",       &options->stages[STAGE_LOAD]));
27   CHKERRQ(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
28   CHKERRQ(PetscLogStageRegister("MeshRefine",     &options->stages[STAGE_REFINE]));
29   CHKERRQ(PetscLogStageRegister("MeshOverlap",    &options->stages[STAGE_OVERLAP]));
30   PetscFunctionReturn(0);
31 }
32 
33 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
34 {
35   PetscMPIInt    rank, size;
36 
37   PetscFunctionBegin;
38   CHKERRQ(PetscLogEventBegin(user->createMeshEvent,0,0,0,0));
39   CHKERRMPI(MPI_Comm_rank(comm, &rank));
40   CHKERRMPI(MPI_Comm_size(comm, &size));
41   CHKERRQ(PetscLogStagePush(user->stages[STAGE_LOAD]));
42   CHKERRQ(DMCreate(comm, dm));
43   CHKERRQ(DMSetType(*dm, DMPLEX));
44   CHKERRQ(DMSetFromOptions(*dm));
45   CHKERRQ(PetscLogStagePop());
46   {
47     DM               pdm = NULL;
48     PetscPartitioner part;
49 
50     CHKERRQ(DMPlexGetPartitioner(*dm, &part));
51     CHKERRQ(PetscPartitionerSetFromOptions(part));
52     /* Distribute mesh over processes */
53     CHKERRQ(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
54     CHKERRQ(DMPlexDistribute(*dm, 0, NULL, &pdm));
55     if (pdm) {
56       CHKERRQ(DMDestroy(dm));
57       *dm  = pdm;
58     }
59     CHKERRQ(PetscLogStagePop());
60   }
61   CHKERRQ(PetscLogStagePush(user->stages[STAGE_REFINE]));
62   CHKERRQ(PetscObjectSetOptionsPrefix((PetscObject) *dm, "post_"));
63   CHKERRQ(DMSetFromOptions(*dm));
64   CHKERRQ(PetscObjectSetOptionsPrefix((PetscObject) *dm, ""));
65   CHKERRQ(PetscLogStagePop());
66   if (user->overlap) {
67     DM odm = NULL;
68     /* Add the level-1 overlap to refined mesh */
69     CHKERRQ(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
70     CHKERRQ(DMPlexDistributeOverlap(*dm, 1, NULL, &odm));
71     if (odm) {
72       CHKERRQ(DMView(odm, PETSC_VIEWER_STDOUT_WORLD));
73       CHKERRQ(DMDestroy(dm));
74       *dm = odm;
75     }
76     CHKERRQ(PetscLogStagePop());
77   }
78   CHKERRQ(DMViewFromOptions(*dm, NULL, "-dm_view"));
79   CHKERRQ(PetscLogEventEnd(user->createMeshEvent,0,0,0,0));
80   PetscFunctionReturn(0);
81 }
82 
int main(int argc, char **argv)
{
  DM             dm, pdm;              /* dm: mesh from CreateMesh; pdm: redistributed mesh (may be NULL) */
  AppCtx         user;                 /* user-defined work context */
  PetscErrorCode ierr;
  PetscPartitioner part;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  CHKERRQ(ProcessOptions(PETSC_COMM_WORLD, &user));
  CHKERRQ(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  /* Redistribute the (already distributed, refined) mesh, this time
     requesting user.overlap layers of overlap cells */
  CHKERRQ(DMPlexGetPartitioner(dm, &part));
  CHKERRQ(PetscPartitionerSetFromOptions(part));
  CHKERRQ(DMPlexDistribute(dm, user.overlap, NULL, &pdm));
  if (pdm) CHKERRQ(DMViewFromOptions(pdm, NULL, "-pdm_view"));
  CHKERRQ(DMDestroy(&dm));
  CHKERRQ(DMDestroy(&pdm)); /* safe: DMDestroy is a no-op on a NULL DM */
  ierr = PetscFinalize();
  return ierr;
}
102 
103 /*TEST
104 
105   test:
106     suffix: 0
107     requires: ctetgen
108     args: -dm_plex_dim 3 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
109   test:
110     suffix: 1
111     args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
112   test:
113     suffix: quad_0
114     nsize: 2
115     args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view -pdm_view
116 
117 TEST*/
118