static const char help[] = "Test initialization and migration with swarm.\n";

#include <petscdmplex.h>
#include <petscdmswarm.h>

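/*
  Example run, mirroring the swarm_migrate_hash test at the bottom of this
  file (the MPI launcher and executable name depend on your local build and
  are assumptions here):

    mpiexec -n 2 ./ex1 -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 \
      -dm_plex_box_faces 10,10,10 -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. \
      -dm_plex_box_bd none,none,none -dm_plex_hash_location true
*/
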
typedef struct {
  PetscReal L[3]; /* Dimensions of the mesh bounding box */
  PetscBool setClosurePermutation;
} AppCtx;

static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  PetscOptionsBegin(comm, "", "Swarm configuration options", "DMSWARM");
  options->setClosurePermutation = PETSC_FALSE;
  PetscCall(PetscOptionsBool("-set_closure_permutation", "Set the closure permutation to tensor ordering", NULL, options->setClosurePermutation, &options->setClosurePermutation, NULL));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
{
  PetscReal low[3], high[3];
  PetscInt  cdim, d;

  PetscFunctionBeginUser;
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMPLEX));
  PetscCall(DMSetFromOptions(*dm));
  PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
  if (user->setClosurePermutation) {
    DM cdm;

    // -- Set tensor permutation
    PetscCall(DMGetCoordinateDM(*dm, &cdm));
    PetscCall(DMPlexSetClosurePermutationTensor(*dm, PETSC_DETERMINE, NULL));
    PetscCall(DMPlexSetClosurePermutationTensor(cdm, PETSC_DETERMINE, NULL));
  }
  PetscCall(DMGetCoordinateDim(*dm, &cdim));
  PetscCall(DMGetBoundingBox(*dm, low, high));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "dim: %" PetscInt_FMT "\n", cdim));
  for (d = 0; d < cdim; ++d) user->L[d] = high[d] - low[d];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  This function initializes all particles on rank 0.
  They are then migrated to other ranks to test migration across non-nearest neighbors.
*/
static PetscErrorCode CreateSwarm(DM dm, DM *sw, AppCtx *user)
{
  DMSwarmCellDM celldm;
  PetscInt      particleInitSize = 10;
  PetscReal    *coords, upper[3], lower[3];
  PetscInt     *swarm_cellid, Np, dim, Nfc;
  PetscMPIInt   rank, size;
  MPI_Comm      comm;
  const char  **coordFields, *cellid;

  PetscFunctionBeginUser;
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(DMGetBoundingBox(dm, lower, upper));
  PetscCall(DMCreate(PETSC_COMM_WORLD, sw));
  PetscCall(DMGetDimension(dm, &dim));
  PetscCall(DMSetType(*sw, DMSWARM));
  PetscCall(DMSetDimension(*sw, dim));
  PetscCall(DMSwarmSetType(*sw, DMSWARM_PIC));
  PetscCall(DMSwarmSetCellDM(*sw, dm));
  PetscCall(DMSwarmFinalizeFieldRegister(*sw));
  PetscCall(DMSwarmSetLocalSizes(*sw, rank == 0 ? particleInitSize : 0, 0));
  PetscCall(DMSetFromOptions(*sw));
  PetscCall(DMSwarmGetCellDMActive(*sw, &celldm));
  PetscCall(DMSwarmCellDMGetCellID(celldm, &cellid));
  PetscCall(DMSwarmCellDMGetCoordinateFields(celldm, &Nfc, &coordFields));
  PetscCheck(Nfc == 1, PetscObjectComm((PetscObject)*sw), PETSC_ERR_SUP, "We only support a single coordinate field right now, not %" PetscInt_FMT, Nfc);
  PetscCall(DMSwarmGetField(*sw, coordFields[0], NULL, NULL, (void **)&coords));
  PetscCall(DMSwarmGetField(*sw, cellid, NULL, NULL, (void **)&swarm_cellid));
  PetscCall(DMSwarmGetLocalSize(*sw, &Np));
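  /* Place every particle at the midpoint of the global bounding box, then
     spread the particles uniformly along y. Cell ids start at 0; migration
     relocates each particle to its owning cell. */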
  for (PetscInt p = 0; p < Np; ++p) {
    for (PetscInt d = 0; d < dim; ++d) { coords[p * dim + d] = lower[d] + 0.5 * (upper[d] - lower[d]); }
    coords[p * dim + 1] = (upper[1] - lower[1]) / particleInitSize * p + lower[1];
    swarm_cellid[p]     = 0;
  }
  PetscCall(DMSwarmRestoreField(*sw, coordFields[0], NULL, NULL, (void **)&coords));
  PetscCall(DMSwarmRestoreField(*sw, cellid, NULL, NULL, (void **)&swarm_cellid));
  PetscCall(DMViewFromOptions(*sw, NULL, "-sw_view"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Migrate the swarm, then verify that no particles were lost,
  duplicated, or corrupted: the global particle count and the
  norm of the coordinate vector must be unchanged.
*/
static PetscErrorCode CheckMigrate(DM sw)
{
  Vec       preMigrate, postMigrate, tmp;
  PetscInt  preSize, postSize;
  PetscReal prenorm, postnorm;

  PetscFunctionBeginUser;
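  /* Snapshot the particle coordinates as a global vector before migration */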
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecDuplicate(tmp, &preMigrate));
  PetscCall(VecCopy(tmp, preMigrate));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
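  /* Migrate (PETSC_TRUE removes migrated particles from the sender), then snapshot again */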
  PetscCall(DMSwarmMigrate(sw, PETSC_TRUE));
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecDuplicate(tmp, &postMigrate));
  PetscCall(VecCopy(tmp, postMigrate));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecGetSize(preMigrate, &preSize));
  PetscCall(VecGetSize(postMigrate, &postSize));
  PetscCheck(preSize == postSize, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "Particles either lost or duplicated. Pre-migration global size %" PetscInt_FMT " != post-migration size %" PetscInt_FMT, preSize, postSize);
  PetscCall(VecNorm(preMigrate, NORM_2, &prenorm));
  PetscCall(VecNorm(postMigrate, NORM_2, &postnorm));
  PetscCheck(PetscAbsReal(prenorm - postnorm) < 100. * PETSC_MACHINE_EPSILON, PETSC_COMM_SELF, PETSC_ERR_COR, "Particle coordinates corrupted in migrate with abs(norm(pre) - norm(post)) = %.16g", PetscAbsReal(prenorm - postnorm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Migrate check passes.\n"));
  PetscCall(VecDestroy(&preMigrate));
  PetscCall(VecDestroy(&postMigrate));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Check that added points persist across migration
*/
static PetscErrorCode CheckPointInsertion(DM sw)
{
  PetscInt    Np_pre, Np_post;
  PetscMPIInt rank, size;
  MPI_Comm    comm;

  PetscFunctionBeginUser;
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscPrintf(comm, "Basic point insertion check...\n"));
  PetscCall(DMSwarmGetSize(sw, &Np_pre));
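  /* Add a single particle on rank 0; the global count must grow by exactly one */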
  if (rank == 0) PetscCall(DMSwarmAddPoint(sw));
  PetscCall(DMSwarmGetSize(sw, &Np_post));
  PetscCheck(Np_post == (Np_pre + 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Particle insertion failed. Global size pre insertion: %" PetscInt_FMT " global size post insertion: %" PetscInt_FMT, Np_pre, Np_post);
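  /* The newly added particle must also survive a subsequent migration */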
  PetscCall(CheckMigrate(sw));
  PetscCall(PetscPrintf(comm, "Basic point insertion check passes.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
  Check that tie breaking works properly when a particle
  is located on a boundary shared by two ranks: the higher
  rank should receive the particle while the lower rank
  deletes its copy.

  TODO: Currently only works for 2 procs.
*/
static PetscErrorCode CheckPointInsertion_Boundary(DM sw)
{
  PetscInt    Np_loc_pre, Np_loc_post, dim;
  PetscMPIInt rank, size;
  PetscReal   lbox_low[3], lbox_high[3], gbox_low[3], gbox_high[3];
  MPI_Comm    comm;
  DM          cdm;

  PetscFunctionBeginUser;
  comm = PETSC_COMM_WORLD;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(PetscPrintf(comm, "Rank boundary point insertion check...\n"));
  PetscCall(DMSwarmGetCellDM(sw, &cdm));
  PetscCall(DMGetDimension(cdm, &dim));
  PetscCall(DMGetBoundingBox(cdm, gbox_low, gbox_high));
  if (rank == 0) {
    PetscReal *coords;
    PetscInt   adjacentdim = 0, Np;

    PetscCall(DMGetLocalBoundingBox(cdm, lbox_low, lbox_high));
    // Find a direction in which the local box stops short of the global one;
    // the upper face in that direction is shared with a neighboring rank.
    for (PetscInt d = 0; d < dim; ++d) {
      if ((gbox_high[d] - lbox_high[d]) != 0.) adjacentdim = d;
    }
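    /* Place the new particle on the shared face: local-box midpoint in every
       direction except adjacentdim, where it sits exactly on the upper boundary */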
    PetscCall(DMSwarmAddPoint(sw));
    PetscCall(DMSwarmGetLocalSize(sw, &Np));
    PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
    for (PetscInt d = 0; d < dim; ++d) coords[(Np - 1) * dim + d] = lbox_low[d] + 0.5 * (lbox_high[d] - lbox_low[d]);
    coords[(Np - 1) * dim + adjacentdim] = lbox_high[adjacentdim];
    PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
  }
  PetscCall(DMSwarmGetLocalSize(sw, &Np_loc_pre));
  PetscCall(CheckMigrate(sw));
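  /* Tie breaking: the boundary particle must leave rank 0 and arrive on rank 1 */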
  PetscCall(DMSwarmGetLocalSize(sw, &Np_loc_post));
  if (rank == 0) PetscCheck(Np_loc_pre == (Np_loc_post + 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Migration tie breaking failed on rank %d. Particle on boundary not sent.", rank);
  if (rank == 1) PetscCheck(Np_loc_pre == (Np_loc_post - 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Migration tie breaking failed on rank %d. Particle on boundary not received.", rank);
  PetscCall(PetscPrintf(comm, "Rank boundary point insertion check passes.\n"));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM       dm, sw;
  MPI_Comm comm;
  AppCtx   user;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCall(ProcessOptions(comm, &user));
  PetscCall(CreateMesh(comm, &dm, &user));
  PetscCall(CreateSwarm(dm, &sw, &user));
  PetscCall(CheckMigrate(sw));
  PetscCall(CheckPointInsertion(sw));
  PetscCall(CheckPointInsertion_Boundary(sw));
  PetscCall(DMDestroy(&sw));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

  # Swarm does not handle complex or quad precision
  build:
    requires: !complex double
  # swarm_migrate_hash and swarm_migrate_scan test swarm migration with each point-location type
  # on a distributed mesh where ranks overlap by 1. Points on the shared boundary should
  # be sent to the highest-ranked process that owns that portion of the domain.
  test:
    suffix: swarm_migrate_vec_hip_scan
    nsize: 2
    requires: hip
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_vec_type hip -dm_plex_hash_location false
  test:
    suffix: swarm_migrate_vec_hip_hash
    nsize: 2
    requires: hip
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_vec_type hip -dm_plex_hash_location true
  test:
    suffix: swarm_migrate_vec_hip_hash_tensor_permutation
    nsize: 2
    requires: hip
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_vec_type hip -dm_plex_hash_location true\
          -set_closure_permutation
  test:
    suffix: swarm_migrate_hash
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location true
    filter: grep -v marker | grep -v atomic | grep -v usage
  test:
    suffix: swarm_migrate_hash_tensor_permutation
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location true -set_closure_permutation
    filter: grep -v marker | grep -v atomic | grep -v usage
  test:
    suffix: swarm_migrate_scan
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location false
    filter: grep -v marker | grep -v atomic | grep -v usage
  test:
    suffix: swarm_migrate_scan_tensor_permutation
    nsize: 2
    args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
          -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
          -dm_plex_hash_location false -set_closure_permutation
    filter: grep -v marker | grep -v atomic | grep -v usage
TEST*/