const char help[] = "Set up a PetscSF for halo exchange between local vectors";

#include <petscdmplex.h>
#include <petscsf.h>

int main(int argc, char **argv)
{
  DM           dm;
  Vec          u;
  PetscSection section;
  PetscInt     dim, numFields, numBC, i;
  PetscMPIInt  rank;
  PetscInt     numComp[2];
  PetscInt     numDof[12];
  PetscInt    *remoteOffsets;
  PetscSF      pointSF;
  PetscSF      sectionSF;
  PetscScalar *array;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  /* Create a mesh */
  PetscCall(DMCreate(PETSC_COMM_WORLD, &dm));
  PetscCall(DMSetType(dm, DMPLEX));
  PetscCall(DMSetFromOptions(dm));
  PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
  PetscCall(DMGetDimension(dm, &dim));

  /** Describe the solution variables that are discretized on the mesh */
  /* Create a scalar field u and a vector field v */
  numFields  = 2;
  numComp[0] = 1;
  numComp[1] = dim;
  for (i = 0; i < numFields * (dim + 1); ++i) numDof[i] = 0;
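  /* numDof[f * (dim + 1) + d] is the number of dofs field f places on each mesh point of depth d,
   * where depth 0 is a vertex and depth dim is a cell */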
  /* Let u be defined on cells */
  numDof[0 * (dim + 1) + dim] = 1;
  /* Let v be defined on vertices */
  numDof[1 * (dim + 1) + 0] = dim;
  /* No boundary conditions */
  numBC = 0;

  /** Create a PetscSection to handle the layout of the discretized variables */
  PetscCall(DMSetNumFields(dm, numFields));
  PetscCall(DMPlexCreateSection(dm, NULL, numComp, numDof, numBC, NULL, NULL, NULL, NULL, &section));
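  /* The NULL arguments are the optional per-field labels, boundary condition fields/components/points,
   * and point permutation, none of which are needed here */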
  /* Name the Field variables */
  PetscCall(PetscSectionSetFieldName(section, 0, "u"));
  PetscCall(PetscSectionSetFieldName(section, 1, "v"));
  /* Tell the DM to use this data layout */
  PetscCall(DMSetLocalSection(dm, section));

  /** Construct the communication pattern for halo exchange between local vectors */
  /* Get the point SF: an object that says which local mesh points (cells,
   * vertices, faces, edges) are ghost copies of points owned by other processes */
  PetscCall(DMGetPointSF(dm, &pointSF));
  /* Relate the locations of ghost degrees of freedom on this process
   * to the locations of their non-ghost copies on other processes */
  PetscCall(PetscSFCreateRemoteOffsets(pointSF, section, section, &remoteOffsets));
  /* Use that information to construct a star forest for halo exchange
   * for data described by the local section */
  PetscCall(PetscSFCreateSectionSF(pointSF, section, remoteOffsets, section, &sectionSF));
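  /* The remote offsets are only needed while building the section SF, so they can be freed now */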
  PetscCall(PetscFree(remoteOffsets));

  /** Demo of halo exchange */
  /* Create a Vec with this layout */
  PetscCall(DMCreateLocalVector(dm, &u));
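  /* Because the local section is attached to the DM, this local vector has entries for both owned and ghost dofs */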
  PetscCall(PetscObjectSetName((PetscObject)u, "Local vector"));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  /* Set all mesh values to the MPI rank */
  PetscCall(VecSet(u, (PetscScalar)rank));
  /* Get the raw array of values */
  PetscCall(VecGetArrayWrite(u, &array));
  /*** HALO EXCHANGE ***/
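  /* The broadcast copies each owned (root) dof value into the matching ghost (leaf) dofs on other
   * processes; MPI_REPLACE makes the incoming value overwrite whatever the ghost entry held */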
  PetscCall(PetscSFBcastBegin(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE));
  /* local work can be done between Begin() and End() */
  PetscCall(PetscSFBcastEnd(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE));
  /* Restore the raw array of values */
  PetscCall(VecRestoreArrayWrite(u, &array));
  /* View the results: should show which process has the non-ghost copy of each degree of freedom */
  PetscCall(PetscSectionVecView(section, u, PETSC_VIEWER_STDOUT_WORLD));
  PetscCall(VecDestroy(&u));

  /** Cleanup */
  PetscCall(PetscSFDestroy(&sectionSF));
  PetscCall(PetscSectionDestroy(&section));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  # Test on a 1D mesh with overlap
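  # An overlap of 1 gives each process a layer of ghost cells, so the exchange has ghost dofs to fill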
  test:
    nsize: 3
    requires: !complex
    args: -dm_plex_dim 1 -dm_plex_box_faces 3 -dm_refine_pre 1 -petscpartitioner_type simple -dm_distribute_overlap 1

TEST*/