xref: /petsc/src/dm/impls/plex/tests/ex29.c (revision 6a5217c03994f2d95bb2e6dbd8bed42381aeb015)
1 static char help[] = "Test scalable partitioning on distributed meshes\n\n";
2 
3 #include <petscdmplex.h>
4 
/* Logging stages for the phases of mesh construction; indices into AppCtx.stages */
enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

typedef struct {
  PetscLogEvent createMeshEvent; /* event covering all of CreateMesh() */
  PetscLogStage stages[4];       /* one log stage per phase above */
  /* Domain and mesh definition */
  PetscInt overlap; /* The cell overlap to use during partitioning */
} AppCtx;
13 
14 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
15 {
16   PetscErrorCode ierr;
17 
18   PetscFunctionBegin;
19   options->overlap = PETSC_FALSE;
20 
21   ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");PetscCall(ierr);
22   PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex29.c", options->overlap, &options->overlap, NULL,0));
23   ierr = PetscOptionsEnd();PetscCall(ierr);
24 
25   PetscCall(PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent));
26   PetscCall(PetscLogStageRegister("MeshLoad",       &options->stages[STAGE_LOAD]));
27   PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]));
28   PetscCall(PetscLogStageRegister("MeshRefine",     &options->stages[STAGE_REFINE]));
29   PetscCall(PetscLogStageRegister("MeshOverlap",    &options->stages[STAGE_OVERLAP]));
30   PetscFunctionReturn(0);
31 }
32 
33 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
34 {
35   PetscMPIInt    rank, size;
36 
37   PetscFunctionBegin;
38   PetscCall(PetscLogEventBegin(user->createMeshEvent,0,0,0,0));
39   PetscCallMPI(MPI_Comm_rank(comm, &rank));
40   PetscCallMPI(MPI_Comm_size(comm, &size));
41   PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD]));
42   PetscCall(DMCreate(comm, dm));
43   PetscCall(DMSetType(*dm, DMPLEX));
44   PetscCall(DMSetFromOptions(*dm));
45   PetscCall(PetscLogStagePop());
46   {
47     DM               pdm = NULL;
48     PetscPartitioner part;
49 
50     PetscCall(DMPlexGetPartitioner(*dm, &part));
51     PetscCall(PetscPartitionerSetFromOptions(part));
52     /* Distribute mesh over processes */
53     PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]));
54     PetscCall(DMPlexDistribute(*dm, 0, NULL, &pdm));
55     if (pdm) {
56       PetscCall(DMDestroy(dm));
57       *dm  = pdm;
58     }
59     PetscCall(PetscLogStagePop());
60   }
61   PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE]));
62   PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, "post_"));
63   PetscCall(DMSetFromOptions(*dm));
64   PetscCall(PetscObjectSetOptionsPrefix((PetscObject) *dm, ""));
65   PetscCall(PetscLogStagePop());
66   if (user->overlap) {
67     DM odm = NULL;
68     /* Add the level-1 overlap to refined mesh */
69     PetscCall(PetscLogStagePush(user->stages[STAGE_OVERLAP]));
70     PetscCall(DMPlexDistributeOverlap(*dm, 1, NULL, &odm));
71     if (odm) {
72       PetscCall(DMView(odm, PETSC_VIEWER_STDOUT_WORLD));
73       PetscCall(DMDestroy(dm));
74       *dm = odm;
75     }
76     PetscCall(PetscLogStagePop());
77   }
78   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
79   PetscCall(PetscLogEventEnd(user->createMeshEvent,0,0,0,0));
80   PetscFunctionReturn(0);
81 }
82 
83 int main(int argc, char **argv)
84 {
85   DM             dm, pdm;
86   AppCtx         user;                 /* user-defined work context */
87   PetscPartitioner part;
88 
89   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
90   PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
91   PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
92   PetscCall(DMPlexGetPartitioner(dm, &part));
93   PetscCall(PetscPartitionerSetFromOptions(part));
94   PetscCall(DMPlexDistribute(dm, user.overlap, NULL, &pdm));
95   if (pdm) PetscCall(DMViewFromOptions(pdm, NULL, "-pdm_view"));
96   PetscCall(DMDestroy(&dm));
97   PetscCall(DMDestroy(&pdm));
98   PetscCall(PetscFinalize());
99   return 0;
100 }
101 
102 /*TEST
103 
104   test:
105     suffix: 0
106     requires: ctetgen
107     args: -dm_plex_dim 3 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
108   test:
109     suffix: 1
110     args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
111   test:
112     suffix: quad_0
113     nsize: 2
114     args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view -pdm_view
115 
116 TEST*/
117