1 static char help[] = "Partition a mesh in parallel, perhaps with overlap\n\n"; 2 3 #include <petscdmplex.h> 4 #include <petscsf.h> 5 6 /* Sample usage: 7 8 Load a file in serial and distribute it on 24 processes: 9 10 make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -orig_dm_view -dm_view" NP=24 11 12 Load a file in serial and distribute it on 24 processes using a custom partitioner: 13 14 make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/cylinder.med -petscpartitioner_type simple -orig_dm_view -dm_view" NP=24 15 16 Load a file in serial, distribute it, and then redistribute it on 24 processes using two different partitioners: 17 18 make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type simple -load_balance -lb_petscpartitioner_type parmetis -orig_dm_view -dm_view" NP=24 19 20 Load a file in serial, distribute it randomly, refine it in parallel, and then redistribute it on 24 processes using two different partitioners, and view to VTK: 21 22 make -f ./gmakefile test globsearch="dm_impls_plex_tests-ex12_0" EXTRA_OPTIONS="-filename $PETSC_DIR/share/petsc/datafiles/meshes/squaremotor-30.exo -petscpartitioner_type shell -petscpartitioner_shell_random -dm_refine 1 -load_balance -lb_petscpartitioner_type parmetis -prelb_dm_view vtk:$PWD/prelb.vtk -dm_view vtk:$PWD/balance.vtk -dm_partition_view" NP=24 23 24 */ 25 26 enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_REDISTRIBUTE}; 27 28 typedef struct { 29 /* Domain and mesh definition */ 30 PetscInt overlap; /* The cell overlap to use during partitioning */ 31 PetscBool testPartition; /* Use a fixed partitioning for testing */ 32 PetscBool testRedundant; /* Use a redundant partitioning for testing */ 33 PetscBool 
loadBalance; /* Load balance via a second distribute step */ 34 PetscBool partitionBalance; /* Balance shared point partition */ 35 PetscLogStage stages[4]; 36 } AppCtx; 37 38 PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options) 39 { 40 PetscErrorCode ierr; 41 42 PetscFunctionBegin; 43 options->overlap = 0; 44 options->testPartition = PETSC_FALSE; 45 options->testRedundant = PETSC_FALSE; 46 options->loadBalance = PETSC_FALSE; 47 options->partitionBalance = PETSC_FALSE; 48 49 ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");PetscCall(ierr); 50 PetscCall(PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex12.c", options->overlap, &options->overlap, NULL,0)); 51 PetscCall(PetscOptionsBool("-test_partition", "Use a fixed partition for testing", "ex12.c", options->testPartition, &options->testPartition, NULL)); 52 PetscCall(PetscOptionsBool("-test_redundant", "Use a redundant partition for testing", "ex12.c", options->testRedundant, &options->testRedundant, NULL)); 53 PetscCall(PetscOptionsBool("-load_balance", "Perform parallel load balancing in a second distribution step", "ex12.c", options->loadBalance, &options->loadBalance, NULL)); 54 PetscCall(PetscOptionsBool("-partition_balance", "Balance the ownership of shared points", "ex12.c", options->partitionBalance, &options->partitionBalance, NULL)); 55 ierr = PetscOptionsEnd();PetscCall(ierr); 56 57 PetscCall(PetscLogStageRegister("MeshLoad", &options->stages[STAGE_LOAD])); 58 PetscCall(PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE])); 59 PetscCall(PetscLogStageRegister("MeshRefine", &options->stages[STAGE_REFINE])); 60 PetscCall(PetscLogStageRegister("MeshRedistribute", &options->stages[STAGE_REDISTRIBUTE])); 61 PetscFunctionReturn(0); 62 } 63 64 PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm) 65 { 66 DM pdm = NULL; 67 PetscInt triSizes_n2[2] = {4, 4}; 68 PetscInt triPoints_n2[8] = {0, 1, 4, 6, 2, 3, 5, 7}; 69 PetscInt 
triSizes_n3[3] = {3, 2, 3}; 70 PetscInt triPoints_n3[8] = {3, 5, 6, 1, 7, 0, 2, 4}; 71 PetscInt triSizes_n4[4] = {2, 2, 2, 2}; 72 PetscInt triPoints_n4[8] = {0, 7, 1, 5, 2, 3, 4, 6}; 73 PetscInt triSizes_n8[8] = {1, 1, 1, 1, 1, 1, 1, 1}; 74 PetscInt triPoints_n8[8] = {0, 1, 2, 3, 4, 5, 6, 7}; 75 PetscInt quadSizes[2] = {2, 2}; 76 PetscInt quadPoints[4] = {2, 3, 0, 1}; 77 PetscInt overlap = user->overlap >= 0 ? user->overlap : 0; 78 PetscInt dim; 79 PetscBool simplex; 80 PetscMPIInt rank, size; 81 82 PetscFunctionBegin; 83 PetscCallMPI(MPI_Comm_rank(comm, &rank)); 84 PetscCallMPI(MPI_Comm_size(comm, &size)); 85 PetscCall(PetscLogStagePush(user->stages[STAGE_LOAD])); 86 PetscCall(DMCreate(comm, dm)); 87 PetscCall(DMSetType(*dm, DMPLEX)); 88 PetscCall(DMPlexDistributeSetDefault(*dm, PETSC_FALSE)); 89 PetscCall(DMSetFromOptions(*dm)); 90 PetscCall(DMViewFromOptions(*dm, NULL, "-orig_dm_view")); 91 PetscCall(PetscLogStagePop()); 92 PetscCall(DMGetDimension(*dm, &dim)); 93 PetscCall(DMPlexIsSimplex(*dm, &simplex)); 94 PetscCall(PetscLogStagePush(user->stages[STAGE_DISTRIBUTE])); 95 if (!user->testRedundant) { 96 PetscPartitioner part; 97 98 PetscCall(DMPlexGetPartitioner(*dm, &part)); 99 PetscCall(PetscPartitionerSetFromOptions(part)); 100 PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance)); 101 if (user->testPartition) { 102 const PetscInt *sizes = NULL; 103 const PetscInt *points = NULL; 104 105 if (rank == 0) { 106 if (dim == 2 && simplex && size == 2) { 107 sizes = triSizes_n2; points = triPoints_n2; 108 } else if (dim == 2 && simplex && size == 3) { 109 sizes = triSizes_n3; points = triPoints_n3; 110 } else if (dim == 2 && simplex && size == 4) { 111 sizes = triSizes_n4; points = triPoints_n4; 112 } else if (dim == 2 && simplex && size == 8) { 113 sizes = triSizes_n8; points = triPoints_n8; 114 } else if (dim == 2 && !simplex && size == 2) { 115 sizes = quadSizes; points = quadPoints; 116 } 117 } 118 PetscCall(PetscPartitionerSetType(part, 
PETSCPARTITIONERSHELL)); 119 PetscCall(PetscPartitionerShellSetPartition(part, size, sizes, points)); 120 } 121 PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm)); 122 } else { 123 PetscSF sf; 124 125 PetscCall(DMPlexGetRedundantDM(*dm, &sf, &pdm)); 126 if (sf) { 127 DM test; 128 129 PetscCall(DMPlexCreate(comm,&test)); 130 PetscCall(PetscObjectSetName((PetscObject)test, "Test SF-migrated Redundant Mesh")); 131 PetscCall(DMPlexMigrate(*dm, sf, test)); 132 PetscCall(DMViewFromOptions(test, NULL, "-redundant_migrated_dm_view")); 133 PetscCall(DMDestroy(&test)); 134 } 135 PetscCall(PetscSFDestroy(&sf)); 136 } 137 if (pdm) { 138 PetscCall(DMDestroy(dm)); 139 *dm = pdm; 140 } 141 PetscCall(PetscLogStagePop()); 142 PetscCall(DMSetFromOptions(*dm)); 143 if (user->loadBalance) { 144 PetscPartitioner part; 145 146 PetscCall(DMViewFromOptions(*dm, NULL, "-prelb_dm_view")); 147 PetscCall(DMPlexSetOptionsPrefix(*dm, "lb_")); 148 PetscCall(PetscLogStagePush(user->stages[STAGE_REDISTRIBUTE])); 149 PetscCall(DMPlexGetPartitioner(*dm, &part)); 150 PetscCall(PetscObjectSetOptionsPrefix((PetscObject) part, "lb_")); 151 PetscCall(PetscPartitionerSetFromOptions(part)); 152 if (user->testPartition) { 153 PetscInt reSizes_n2[2] = {2, 2}; 154 PetscInt rePoints_n2[4] = {2, 3, 0, 1}; 155 if (rank) {rePoints_n2[0] = 1; rePoints_n2[1] = 2, rePoints_n2[2] = 0, rePoints_n2[3] = 3;} 156 157 PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSHELL)); 158 PetscCall(PetscPartitionerShellSetPartition(part, size, reSizes_n2, rePoints_n2)); 159 } 160 PetscCall(DMPlexSetPartitionBalance(*dm, user->partitionBalance)); 161 PetscCall(DMPlexDistribute(*dm, overlap, NULL, &pdm)); 162 if (pdm) { 163 PetscCall(DMDestroy(dm)); 164 *dm = pdm; 165 } 166 PetscCall(PetscLogStagePop()); 167 } 168 PetscCall(PetscLogStagePush(user->stages[STAGE_REFINE])); 169 PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view")); 170 PetscCall(PetscLogStagePop()); 171 PetscFunctionReturn(0); 172 } 173 174 int main(int argc, 
char **argv)
{
  DM     dm;
  AppCtx user; /* user-defined work context */

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCall(ProcessOptions(PETSC_COMM_WORLD, &user));
  PetscCall(CreateMesh(PETSC_COMM_WORLD, &user, &dm));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  # Parallel, no overlap tests 0-2
  test:
    suffix: 0
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex
  test:
    suffix: 1
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  test:
    suffix: 2
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 3
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 4
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  # Parallel, level-2 overlap test 5
  test:
    suffix: 5
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail
  # Parallel load balancing, test 6-7
  test:
    suffix: 6
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail
  test:
    suffix: 7
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail
  # Parallel redundant copying, test 8
  test:
    suffix: 8
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -redundant_migrated_dm_view ascii::ascii_info_detail -dm_view ascii::ascii_info_detail
  test:
    suffix: lb_0
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -prelb_dm_view ::load_balance -dm_view ::load_balance

  # Same tests as above, but with balancing of the shared point partition
  test:
    suffix: 9
    requires: triangle
    args: -dm_coord_space 0 -dm_view ascii:mesh.tex:ascii_latex -partition_balance
  test:
    suffix: 10
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 11
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-1 overlap tests 3-4
  test:
    suffix: 12
    requires: triangle
    nsize: 3
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 13
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel, level-2 overlap test 5
  test:
    suffix: 14
    requires: triangle
    nsize: 8
    args: -dm_coord_space 0 -test_partition -overlap 2 -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel load balancing, test 6-7
  test:
    suffix: 15
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: 16
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_partition -overlap 1 -load_balance -dm_view ascii::ascii_info_detail -partition_balance
  # Parallel redundant copying, test 8
  test:
    suffix: 17
    requires: triangle
    nsize: 2
    args: -dm_coord_space 0 -test_redundant -dm_view ascii::ascii_info_detail -partition_balance
  test:
    suffix: lb_1
    requires: parmetis
    nsize: 4
    args: -dm_coord_space 0 -dm_plex_simplex 0 -dm_plex_box_faces 4,4 -petscpartitioner_type shell -petscpartitioner_shell_random -lb_petscpartitioner_type parmetis -load_balance -lb_petscpartitioner_view -partition_balance -prelb_dm_view ::load_balance -dm_view ::load_balance

TEST*/