static char help[] = "Test scalable partitioning on distributed meshes\n\n";

#include <petscdmplex.h>

enum {STAGE_LOAD, STAGE_DISTRIBUTE, STAGE_REFINE, STAGE_OVERLAP};

typedef struct {
  PetscLogEvent createMeshEvent;
  PetscLogStage stages[4];
  /* Domain and mesh definition */
  PetscInt      overlap; /* The cell overlap to use during partitioning */
} AppCtx;

PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  options->overlap = 0;

  ierr = PetscOptionsBegin(comm, "", "Meshing Problem Options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsBoundedInt("-overlap", "The cell overlap for partitioning", "ex29.c", options->overlap, &options->overlap, NULL, 0);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  ierr = PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshLoad",       &options->stages[STAGE_LOAD]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshDistribute", &options->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshRefine",     &options->stages[STAGE_REFINE]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("MeshOverlap",    &options->stages[STAGE_OVERLAP]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

PetscErrorCode CreateMesh(MPI_Comm comm, AppCtx *user, DM *dm)
{
  PetscMPIInt    rank, size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(user->createMeshEvent,0,0,0,0);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRMPI(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRMPI(ierr);
  /* Create the initial mesh from command-line options */
  ierr = PetscLogStagePush(user->stages[STAGE_LOAD]);CHKERRQ(ierr);
  ierr = DMCreate(comm, dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm, DMPLEX);CHKERRQ(ierr);
  ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  {
    DM               pdm = NULL;
    PetscPartitioner part;

    ierr = DMPlexGetPartitioner(*dm, &part);CHKERRQ(ierr);
    ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
    /* Distribute mesh over processes */
    ierr = PetscLogStagePush(user->stages[STAGE_DISTRIBUTE]);CHKERRQ(ierr);
    ierr = DMPlexDistribute(*dm, 0, NULL, &pdm);CHKERRQ(ierr);
    if (pdm) {
      ierr = DMDestroy(dm);CHKERRQ(ierr);
      *dm  = pdm;
    }
    ierr = PetscLogStagePop();CHKERRQ(ierr);
  }
  /* Refine the distributed mesh using the "post_" options prefix */
  ierr = PetscLogStagePush(user->stages[STAGE_REFINE]);CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "post_");CHKERRQ(ierr);
  ierr = DMSetFromOptions(*dm);CHKERRQ(ierr);
  ierr = PetscObjectSetOptionsPrefix((PetscObject) *dm, "");CHKERRQ(ierr);
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  if (user->overlap) {
    DM odm = NULL;

    /* Add the level-1 overlap to the refined mesh */
    ierr = PetscLogStagePush(user->stages[STAGE_OVERLAP]);CHKERRQ(ierr);
    ierr = DMPlexDistributeOverlap(*dm, 1, NULL, &odm);CHKERRQ(ierr);
    if (odm) {
      ierr = DMView(odm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = DMDestroy(dm);CHKERRQ(ierr);
      *dm  = odm;
    }
    ierr = PetscLogStagePop();CHKERRQ(ierr);
  }
  ierr = DMViewFromOptions(*dm, NULL, "-dm_view");CHKERRQ(ierr);
  ierr = PetscLogEventEnd(user->createMeshEvent,0,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

int main(int argc, char **argv)
{
  DM               dm, pdm = NULL;
  AppCtx           user; /* user-defined work context */
  PetscPartitioner part;
  PetscErrorCode   ierr;

  ierr = PetscInitialize(&argc, &argv, NULL, help);if (ierr) return ierr;
  ierr = ProcessOptions(PETSC_COMM_WORLD, &user);CHKERRQ(ierr);
  ierr = CreateMesh(PETSC_COMM_WORLD, &user, &dm);CHKERRQ(ierr);
  /* Redistribute the mesh, this time with the requested cell overlap */
  ierr = DMPlexGetPartitioner(dm, &part);CHKERRQ(ierr);
  ierr = PetscPartitionerSetFromOptions(part);CHKERRQ(ierr);
  ierr = DMPlexDistribute(dm, user.overlap, NULL, &pdm);CHKERRQ(ierr);
  if (pdm) {ierr = DMViewFromOptions(pdm, NULL, "-pdm_view");CHKERRQ(ierr);}
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  ierr = DMDestroy(&pdm);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

  test:
    suffix: 0
    requires: ctetgen
    args: -dm_plex_dim 3 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
  test:
    suffix: 1
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view
  test:
    suffix: quad_0
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 -petscpartitioner_type simple -dm_view -pdm_view

TEST*/
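/*
  A possible way to run this test by hand (a sketch; it assumes the executable is built
  as "ex29" from this file and that PETSc was configured with MPI):

    mpiexec -n 2 ./ex29 -dm_plex_dim 3 -dm_plex_simplex 0 -post_dm_refine 2 \
      -petscpartitioner_type simple -overlap 1 -dm_view -pdm_view

  The -overlap option is read in ProcessOptions(); the remaining flags mirror the
  quad_0 configuration in the TEST block above.
*/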