xref: /petsc/src/dm/impls/plex/tests/ex31.c (revision f97672e55eacc8688507b9471cd7ec2664d7f203)
/* Help string shown by -help, and the file name passed to the options-
 * registration calls below. */
static char help[] = "Test that shared points on interface of partitions can be rebalanced.\n\n";
static char FILENAME[] = "ex31.c";
3 
4 #include <petscdmplex.h>
5 #include <petscviewerhdf5.h>
6 #include <petscsf.h>
7 
/* Command-line configurable state for this test (filled in by ProcessOptions). */
typedef struct {
  PetscBool parallel;        /* PETSC_TRUE => use ParMetis; PETSC_FALSE => serial Metis */
  PetscBool useInitialGuess; /* Only active when in parallel, uses RefineKway of ParMetis */
  PetscInt  entityDepth;     /* depth of the entities to rebalance ( 0 => vertices) */
} AppCtx;
13 
14 static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
15 {
16   PetscFunctionBegin;
17   options->parallel        = PETSC_FALSE;
18   options->useInitialGuess = PETSC_FALSE;
19   options->entityDepth     = 0;
20 
21   PetscOptionsBegin(comm, "", "Meshing Interpolation Test Options", "DMPLEX");
22   PetscCall(PetscOptionsBoundedInt("-entity_depth", "Depth of the entities to rebalance (0 => vertices)", FILENAME, options->entityDepth, &options->entityDepth, NULL,0));
23   PetscCall(PetscOptionsBool("-parallel", "Use ParMetis instead of Metis", FILENAME, options->parallel, &options->parallel, NULL));
24   PetscCall(PetscOptionsBool("-use_initial_guess", "Use RefineKway function of ParMetis", FILENAME, options->useInitialGuess, &options->useInitialGuess, NULL));
25   PetscOptionsEnd();
26   PetscFunctionReturn(0);
27 }
28 
/* Create the DM for the test.  The mesh itself is configured entirely from the
 * command line (e.g. -dm_plex_dim, -dm_plex_simplex, -dm_plex_box_faces; see
 * the TEST section at the bottom of this file).  The user context is accepted
 * for uniformity with other plex tests but is not read here. */
static PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscFunctionBegin;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetFromOptions(*dm)); /* builds the actual mesh from -dm_plex_* options */
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  PetscFunctionReturn(0);
}
38 
/* Build a mesh, partition and distribute it with ParMetis, then call
 * DMPlexRebalanceSharedPoints() and verify (when running on more than one
 * rank) that the rebalancing preserved the global number of owned points
 * while strictly improving the per-rank min/max balance. */
int main(int argc, char **argv)
{
  MPI_Comm       comm;
  DM             dm, dmdist;
  PetscPartitioner part;
  AppCtx         user;
  IS             is=NULL;          /* target ranks produced by the partitioner */
  PetscSection   s=NULL, gsection=NULL;
  PetscMPIInt    size;
  PetscSF        sf;               /* point SF describing shared (ghost) points */
  PetscInt       pStart, pEnd, p, minBefore, maxBefore, minAfter, maxAfter, gSizeBefore, gSizeAfter;
  PetscBool      success;          /* output of DMPlexRebalanceSharedPoints; not inspected here */

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(ProcessOptions(comm, &user));
  PetscCall(CreateMesh(comm, &user, &dm));

  /* partition dm using PETSCPARTITIONERPARMETIS */
  PetscCall(DMPlexGetPartitioner(dm, &part));
  /* "p_" prefix lets the partitioner be configured independently (-p_*). */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part,"p_"));
  PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERPARMETIS));
  PetscCall(PetscPartitionerSetFromOptions(part));
  PetscCall(PetscSectionCreate(comm, &s));
  PetscCall(PetscPartitionerDMPlexPartition(part, dm, NULL, s, &is));

  /* Migrate the mesh onto the computed partition (overlap = 0). */
  PetscCall(DMPlexDistribute(dm, 0, NULL, &dmdist));
  if (dmdist) { /* dmdist is NULL when nothing was moved (e.g. one rank) */
    PetscCall(DMDestroy(&dm));
    dm = dmdist;
  }

  /* cleanup */
  PetscCall(PetscSectionDestroy(&s));
  PetscCall(ISDestroy(&is));

  /* We make a PetscSection with a DOF on every mesh entity of depth
   * user.entityDepth, then make a global section and look at its storage size.
   * We do the same thing after the rebalancing and then assert that the size
   * remains the same. We also make sure that the balance has improved at least
   * a little bit compared to the initial decomposition. */

  if (size>1) {
    PetscCall(PetscSectionCreate(comm, &s));
    PetscCall(PetscSectionSetNumFields(s, 1));
    PetscCall(PetscSectionSetFieldComponents(s, 0, 1));
    PetscCall(DMPlexGetDepthStratum(dm, user.entityDepth, &pStart, &pEnd));
    PetscCall(PetscSectionSetChart(s, pStart, pEnd));
    /* One DOF per point at the chosen depth, so storage size == point count. */
    for (p = pStart; p < pEnd; ++p) {
      PetscCall(PetscSectionSetDof(s, p, 1));
      PetscCall(PetscSectionSetFieldDof(s, p, 0, 1));
    }
    PetscCall(PetscSectionSetUp(s));
    PetscCall(DMGetPointSF(dm, &sf));
    /* The global section assigns DOFs only to locally OWNED points, so its
     * local storage size counts the points this rank owns at that depth. */
    PetscCall(PetscSectionCreateGlobalSection(s, sf, PETSC_FALSE, PETSC_FALSE, &gsection));
    PetscCall(PetscSectionGetStorageSize(gsection, &gSizeBefore));
    minBefore = gSizeBefore;
    maxBefore = gSizeBefore;
    /* SUM => total owned points; MIN/MAX => balance extremes across ranks. */
    PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &gSizeBefore, 1, MPIU_INT, MPI_SUM, comm));
    PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &minBefore, 1, MPIU_INT, MPI_MIN, comm));
    PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &maxBefore, 1, MPIU_INT, MPI_MAX, comm));
    PetscCall(PetscSectionDestroy(&gsection));
  }

  PetscCall(DMPlexRebalanceSharedPoints(dm, user.entityDepth, user.useInitialGuess, user.parallel, &success));

  if (size>1) {
    /* NOTE(review): sf obtained before the rebalance is reused here —
     * presumably DMPlexRebalanceSharedPoints updates the DM's point SF in
     * place so the borrowed handle stays valid; confirm against the API. */
    PetscCall(PetscSectionCreateGlobalSection(s, sf, PETSC_FALSE, PETSC_FALSE, &gsection));
    PetscCall(PetscSectionGetStorageSize(gsection, &gSizeAfter));
    minAfter = gSizeAfter;
    maxAfter = gSizeAfter;
    PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &gSizeAfter, 1, MPIU_INT, MPI_SUM, comm));
    PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &minAfter, 1, MPIU_INT, MPI_MIN, comm));
    PetscCallMPI(MPI_Allreduce(MPI_IN_PLACE, &maxAfter, 1, MPIU_INT, MPI_MAX, comm));
    /* Rebalancing moves ownership between ranks; it must not create or drop
     * points, so the summed size is invariant. */
    PetscCheck(gSizeAfter == gSizeBefore,comm, PETSC_ERR_PLIB, "Global section has not the same size before and after.");
    /* Balance must not get worse on either end, and must strictly improve on
     * at least one end (min raised or max lowered). */
    PetscCheck(minAfter >= minBefore && maxAfter <= maxBefore && (minAfter > minBefore || maxAfter < maxBefore),comm, PETSC_ERR_PLIB, "DMPlexRebalanceSharedPoints did not improve mesh point balance.");
    PetscCall(PetscSectionDestroy(&gsection));
    PetscCall(PetscSectionDestroy(&s));
  }

  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}
124 
125 /*TEST
126 
127   testset:
128     args: -dm_plex_dim 3 -dm_plex_simplex 0
129 
130     test:
131       # rebalance a mesh
132       suffix: 0
133       nsize: {{2 3 4}}
134       requires: parmetis
135       args: -dm_plex_box_faces {{2,3,4  5,4,3  7,11,5}} -entity_depth {{0 1}} -parallel {{FALSE TRUE}} -use_initial_guess FALSE
136 
137     test:
138       # rebalance a mesh but use the initial guess (uses a random algorithm and gives different results on different machines, so just check that it runs).
139       suffix: 1
140       nsize: {{2 3 4}}
141       requires: parmetis
142       args: -dm_plex_box_faces {{2,3,4  5,4,3  7,11,5}} -entity_depth {{0 1}} -parallel TRUE -use_initial_guess TRUE
143 
144     test:
145       # no-op in serial
146       suffix: 2
147       nsize: {{1}}
148       requires: parmetis
149       args: -dm_plex_box_faces 2,3,4 -entity_depth 0 -parallel FALSE -use_initial_guess FALSE
150 
151 TEST*/
152