/* xref: /petsc/src/dm/impls/plex/tests/ex29.c (revision f1580f4e3ce5d5b2393648fd039d0d41b440385d) */
static char help[] = "Test scalable partitioning on distributed meshes\n\n";

#include <petscdmplex.h>

/* Indices into AppCtx.stages: one profiling stage per phase of mesh setup */
enum {
  STAGE_LOAD,       /* initial mesh creation (DMCreate/DMSetFromOptions) */
  STAGE_DISTRIBUTE, /* parallel distribution of the mesh */
  STAGE_REFINE,     /* post-distribution refinement ("post_" options) */
  STAGE_OVERLAP     /* addition of cell overlap between ranks */
};

typedef struct {
  PetscLogEvent createMeshEvent; /* event timed around CreateMesh() */
  PetscLogStage stages[4];       /* registered stages, indexed by the enum above */
  /* Domain and mesh definition */
  PetscInt      overlap; /* The cell overlap to use during partitioning */
} AppCtx;
18 
19 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) {
20   PetscFunctionBegin;
21   options->overlap = PETSC_FALSE;
22 
23   PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
24   PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex29.c", options->overlap, &options->overlap, NULL, 0));
25   PetscOptionsEnd();
26 
27   PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
28   PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
29   PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
30   PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
31   PetscCall(PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]));
32   PetscFunctionReturn(0);
33 }
34 
35 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) {
36   PetscMPIInt rank, size;
37 
38   PetscFunctionBegin;
39   PetscCall(PetscLogEventBegin(user->createMeshEvent, 0, 0, 0, 0));
40   PetscCallMPI(MPI_Comm_rank(comm, &rank));
41   PetscCallMPI(MPI_Comm_size(comm, &size));
42   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
43   PetscCall(DMCreate(comm, dm));
44   PetscCall(DMSetType(*dm, DMPLEX));
45   PetscCall(DMSetFromOptions(*dm));
46   PetscCall(PetscLogStagePop());
47   {
48     DM               pdm = NULL;
49     PetscPartitioner part;
50 
51     PetscCall(DMPlexGetPartitioner(*dm, &part));
52     PetscCall(PetscPartitionerSetFromOptions(part));
53     /* Distribute mesh over processes */
54     PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
55     PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
56     if (pdm) {
57       PetscCall(DMDestroy(dm));
58       *dm = pdm;
59     }
60     PetscCall(PetscLogStagePop());
61   }
62   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
63   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "post_"));
64   PetscCall(DMSetFromOptions(*dm));
65   PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, ""));
66   PetscCall(PetscLogStagePop());
67   if (user->overlap) {
68     DM odm = NULL;
69     /* Add the level-1 overlap to refined mesh */
70     PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
71     PetscCall(DMPlexDistributeOverlap(*dm, 1, NULL, &odm));
72     if (odm) {
73       PetscCall(DMView(odm, PETSC_VIEWER_STDOUT_WORLD));
74       PetscCall(DMDestroy(dm));
75       *dm = odm;
76     }
77     PetscCall(PetscLogStagePop());
78   }
79   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
80   PetscCall(PetscLogEventEnd(user->createMeshEvent, 0, 0, 0, 0));
81   PetscFunctionReturn(0);
82 }
83 
84 int main(int argc, char **argv) {
85   DM               dm, pdm;
86   AppCtx           user; /* user-defined work context */
87   PetscPartitioner part;
88 
89   PetscFunctionBeginUser;
90   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
91   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
92   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
93   PetscCall(DMPlexGetPartitioner(dm, &part));
94   PetscCall(PetscPartitionerSetFromOptions(part));
95   PetscCall(DMPlexDistribute(dm, user.overlap, NULL, &pdm));
96   if (pdm) PetscCall(DMViewFromOptions(pdm, NULL, "-pdm_view"));
97   PetscCall(DMDestroy(&dm));
98   PetscCall(DMDestroy(&pdm));
99   PetscCall(PetscFinalize());
100   return 0;
101 }
102 
103 /*TEST
104 
105   test:
106     suffix: 0
107     requires: ctetgen
108     args: -dm_plex_dim 3 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
109   test:
110     suffix: 1
111     args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
112   test:
113     suffix: quad_0
114     nsize: 2
115     args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view -pdm_view
116 
117 TEST*/
118