static char help[] = "Test scalable partitioning on distributed meshes\n\n";

#include <petscdmplex.h>
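
/* Example run (mirrors the quad_0 test below):
     mpiexec -n 2 ./ex29 -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view -pdm_view
*/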
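/* Logging stages for each phase of mesh creation */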
enum {
  STAGE_LOAD,
  STAGE_DISTRIBUTE,
  STAGE_REFINE,
  STAGE_OVERLAP
};

typedef struct {
  PetscLogEvent createMeshEvent;
  PetscLogStage stages[4];
  /* Domain and mesh definition */
  PetscInt overlap; /* The cell overlap to use during partitioning */
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBegin;
  options->overlap = 0;

  PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");
  PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex29.c", options->overlap, &options->overlap, NULL, 0));
  PetscOptionsEnd();

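  /* Register a timing event and one logging stage per phase so -log_view reports them separately */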
  PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
  PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD]));
  PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
  PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE]));
  PetscCall(PetscLogStageRegister("MeshOverlap", &options->stages[STAGE_OVERLAP]));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscMPIInt rank, size;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(user->createMeshEvent, 0, 0, 0, 0));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
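  /* Create the mesh, configured from -dm_plex_* command line options */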
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(PetscLogStagePop());
  {
    DM               pdm = NULL;
    PetscPartitioner part;

    PetscCall(DMPlexGetPartitioner(*dm, &part));
    PetscCall(PetscPartitionerSetFromOptions(part));
    /* Distribute mesh over processes */
    PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
    PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
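    /* DMPlexDistribute() returns NULL if the mesh was not redistributed */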
    if (pdm) {
      PetscCall(DMDestroy(dm));
      *dm = pdm;
    }
    PetscCall(PetscLogStagePop());
  }
  PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
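  /* Refine under the "post_" options prefix so options like -post_dm_refine apply only to this pass */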
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, "post_"));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)*dm, ""));
  PetscCall(PetscLogStagePop());
  if (user->overlap) {
    DM odm = NULL;

    /* Add the level-1 overlap to the refined mesh */
    PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
    PetscCall(DMPlexDistributeOverlap(*dm, 1, NULL, &odm));
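    /* DMPlexDistributeOverlap() returns NULL if no overlapping mesh was created */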
    if (odm) {
      PetscCall(DMView(odm, PETSC_VIEWER_STDOUT_WORLD));
      PetscCall(DMDestroy(dm));
      *dm = odm;
    }
    PetscCall(PetscLogStagePop());
  }
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscCall(PetscLogEventEnd(user->createMeshEvent, 0, 0, 0, 0));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM               dm, pdm = NULL;
  AppCtx           user; /* user-defined work context */
  PetscPartitioner part;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
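  /* Repartition the mesh again, this time with the requested cell overlap */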
  PetscCall(DMPlexGetPartitioner(dm, &part));
  PetscCall(PetscPartitionerSetFromOptions(part));
  PetscCall(DMPlexDistribute(dm, user.overlap, NULL, &pdm));
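  /* pdm is NULL if the mesh was not redistributed; DMDestroy() accepts a NULL DM */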
  if (pdm) PetscCall(DMViewFromOptions(pdm, NULL, "-pdm_view"));
  PetscCall(DMDestroy(&dm));
  PetscCall(DMDestroy(&pdm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  test:
    suffix: 0
    requires: ctetgen
    args: -dm_plex_dim 3 -post_dm_refine 2 -petscpartitioner_type simple -dm_view

  test:
    suffix: 1
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view

  test:
    suffix: quad_0
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view -pdm_view

TEST*/