xref: /petsc/src/dm/impls/plex/tutorials/ex14.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
/* One-line program description; shown by -help and passed to PetscInitialize() */
const char help[] = "Set up a PetscSF for halo exchange between local vectors";
2 
3 #include <petscdmplex.h>
4 #include <petscsf.h>
5 
6 int main(int argc, char **argv) {
7   DM           dm;
8   Vec          u;
9   PetscSection section;
10   PetscInt     dim, numFields, numBC, i;
11   PetscMPIInt  rank;
12   PetscInt     numComp[2];
13   PetscInt     numDof[12];
14   PetscInt    *remoteOffsets;
15   PetscSF      pointSF;
16   PetscSF      sectionSF;
17   PetscScalar *array;
18 
19   PetscFunctionBeginUser;
20   PetscCall(PetscInitialize(&argc, &argv, NULL, help));
21   /* Create a mesh */
22   PetscCall(DMCreate(PETSC_COMM_WORLD, &dm));
23   PetscCall(DMSetType(dm, DMPLEX));
24   PetscCall(DMSetFromOptions(dm));
25   PetscCall(DMViewFromOptions(dm, NULL, "-dm_view"));
26   PetscCall(DMGetDimension(dm, &dim));
27 
28   /** Describe the solution variables that are discretized on the mesh */
29   /* Create scalar field u and a vector field v */
30   numFields  = 2;
31   numComp[0] = 1;
32   numComp[1] = dim;
33   for (i = 0; i < numFields * (dim + 1); ++i) numDof[i] = 0;
34   /* Let u be defined on cells */
35   numDof[0 * (dim + 1) + dim] = 1;
36   /* Let v be defined on vertices */
37   numDof[1 * (dim + 1) + 0]   = dim;
38   /* No boundary conditions */
39   numBC                       = 0;
40 
41   /** Create a PetscSection to handle the layout of the discretized variables */
42   PetscCall(DMSetNumFields(dm, numFields));
43   PetscCall(DMPlexCreateSection(dm, NULL, numComp, numDof, numBC, NULL, NULL, NULL, NULL, &section));
44   /* Name the Field variables */
45   PetscCall(PetscSectionSetFieldName(section, 0, "u"));
46   PetscCall(PetscSectionSetFieldName(section, 1, "v"));
47   /* Tell the DM to use this data layout */
48   PetscCall(DMSetLocalSection(dm, section));
49 
50   /** Construct the communication pattern for halo exchange between local vectors */
51   /* Get the point SF: an object that says which copies of mesh points (cells,
52    * vertices, faces, edges) are copies of points on other processes */
53   PetscCall(DMGetPointSF(dm, &pointSF));
54   /* Relate the locations of ghost degrees of freedom on this process
55    * to their locations of the non-ghost copies on a different process */
56   PetscCall(PetscSFCreateRemoteOffsets(pointSF, section, section, &remoteOffsets));
57   /* Use that information to construct a star forest for halo exchange
58    * for data described by the local section */
59   PetscCall(PetscSFCreateSectionSF(pointSF, section, remoteOffsets, section, &sectionSF));
60   PetscCall(PetscFree(remoteOffsets));
61 
62   /** Demo of halo exchange */
63   /* Create a Vec with this layout */
64   PetscCall(DMCreateLocalVector(dm, &u));
65   PetscCall(PetscObjectSetName((PetscObject)u, "Local vector"));
66   PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
67   /* Set all mesh values to the MPI rank */
68   PetscCall(VecSet(u, (PetscScalar)rank));
69   /* Get the raw array of values */
70   PetscCall(VecGetArrayWrite(u, &array));
71   /*** HALO EXCHANGE ***/
72   PetscCall(PetscSFBcastBegin(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE));
73   /* local work can be done between Begin() and End() */
74   PetscCall(PetscSFBcastEnd(sectionSF, MPIU_SCALAR, array, array, MPI_REPLACE));
75   /* Restore the raw array of values */
76   PetscCall(VecRestoreArrayWrite(u, &array));
77   /* View the results: should show which process has the non-ghost copy of each degree of freedom */
78   PetscCall(PetscSectionVecView(section, u, PETSC_VIEWER_STDOUT_WORLD));
79   PetscCall(VecDestroy(&u));
80 
81   /** Cleanup */
82   PetscCall(PetscSFDestroy(&sectionSF));
83   PetscCall(PetscSectionDestroy(&section));
84   PetscCall(DMDestroy(&dm));
85   PetscCall(PetscFinalize());
86   return 0;
87 }
88 
89 /*TEST
90 
91   # Test on a 1D mesh with overlap
92   test:
93     nsize: 3
94     requires: !complex
95     args: -dm_plex_dim 1 -dm_plex_box_faces 3 -dm_refine_pre 1 -petscpartitioner_type simple -dm_distribute_overlap 1
96 
97 TEST*/
98