xref: /petsc/src/dm/impls/swarm/tests/ex1.c (revision fbf9dbe564678ed6eff1806adbc4c4f01b9743f4)
/* Help text printed by PETSc for -help; required boilerplate for PETSc examples */
static const char help[] = "Test initialization and migration with swarm.\n";
2 
3 #include <petscdmplex.h>
4 #include <petscdmswarm.h>
5 
/* Application context passed through the test drivers */
typedef struct {
  PetscReal L[3]; /* Dimensions of the mesh bounding box, filled in by CreateMesh() */
} AppCtx;
9 
/*
  Process command-line options for this test.

  Currently a placeholder: no options are registered, but the
  PetscOptionsBegin/End pair keeps the hook in place for future settings.
*/
static PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscFunctionBeginUser;
  PetscOptionsBegin(comm, "", "Swarm configuration options", "DMSWARM");
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
17 
18 static PetscErrorCode CreateMesh(MPI_Comm comm, DM *dm, AppCtx *user)
19 {
20   PetscReal low[3], high[3];
21   PetscInt  cdim, d;
22 
23   PetscFunctionBeginUser;
24   PetscCall(DMCreate(comm, dm));
25   PetscCall(DMSetType(*dm, DMPLEX));
26   PetscCall(DMSetFromOptions(*dm));
27   PetscCall(DMViewFromOptions(*dm, NULL, "-dm_view"));
28   PetscCall(DMGetCoordinateDim(*dm, &cdim));
29   PetscCall(DMGetBoundingBox(*dm, low, high));
30   PetscCall(PetscPrintf(PETSC_COMM_WORLD, "dim: %" PetscInt_FMT "\n", cdim));
31   for (d = 0; d < cdim; ++d) user->L[d] = high[d] - low[d];
32   PetscFunctionReturn(PETSC_SUCCESS);
33 }
34 
/*
  This function initializes all particles on rank 0.
  They are then sent to other ranks to test migration across non-nearest neighbors.
*/
39 static PetscErrorCode CreateSwarm(DM dm, DM *sw, AppCtx *user)
40 {
41   PetscInt    particleInitSize = 10;
42   PetscReal  *coords, upper[3], lower[3];
43   PetscInt   *cellid, Np, dim;
44   PetscMPIInt rank, size;
45   MPI_Comm    comm;
46 
47   PetscFunctionBegin;
48   comm = PETSC_COMM_WORLD;
49   PetscCallMPI(MPI_Comm_rank(comm, &rank));
50   PetscCallMPI(MPI_Comm_size(comm, &size));
51   PetscCall(DMGetBoundingBox(dm, lower, upper));
52   PetscCall(DMCreate(PETSC_COMM_WORLD, sw));
53   PetscCall(DMGetDimension(dm, &dim));
54   PetscCall(DMSetType(*sw, DMSWARM));
55   PetscCall(DMSetDimension(*sw, dim));
56   PetscCall(DMSwarmSetType(*sw, DMSWARM_PIC));
57   PetscCall(DMSwarmSetCellDM(*sw, dm));
58   PetscCall(DMSwarmFinalizeFieldRegister(*sw));
59   PetscCall(DMSwarmSetLocalSizes(*sw, rank == 0 ? particleInitSize : 0, 0));
60   PetscCall(DMSetFromOptions(*sw));
61   PetscCall(DMSwarmGetField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
62   PetscCall(DMSwarmGetField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
63   PetscCall(DMSwarmGetLocalSize(*sw, &Np));
64   for (PetscInt p = 0; p < Np; ++p) {
65     for (PetscInt d = 0; d < dim; ++d) { coords[p * dim + d] = 0.5 * (upper[d] - lower[d]); }
66     coords[p * dim + 1] = (upper[1] - lower[1]) / particleInitSize * p + lower[1];
67     cellid[p]           = 0;
68   }
69   PetscCall(DMSwarmRestoreField(*sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
70   PetscCall(DMSwarmRestoreField(*sw, DMSwarmPICField_cellid, NULL, NULL, (void **)&cellid));
71   PetscCall(DMViewFromOptions(*sw, NULL, "-sw_view"));
72   PetscFunctionReturn(PETSC_SUCCESS);
73 }
74 
75 /*
76   Configure the swarm on rank 0 with all particles
77   located there, then migrate where they need to be
78 */
static PetscErrorCode CheckMigrate(DM sw)
{
  Vec       preMigrate, postMigrate, tmp;
  PetscInt  preSize, postSize;
  PetscReal prenorm, postnorm;

  PetscFunctionBeginUser;
  /* Snapshot the particle coordinates before migration. The global vector is
     borrowed from the swarm field, so copy it into our own vector before
     returning it. */
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecDuplicate(tmp, &preMigrate));
  PetscCall(VecCopy(tmp, preMigrate));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  /* Migrate particles to their owning ranks, removing sent points locally */
  PetscCall(DMSwarmMigrate(sw, PETSC_TRUE));
  /* Snapshot the coordinates again after migration */
  PetscCall(DMSwarmCreateGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  PetscCall(VecDuplicate(tmp, &postMigrate));
  PetscCall(VecCopy(tmp, postMigrate));
  PetscCall(DMSwarmDestroyGlobalVectorFromField(sw, DMSwarmPICField_coor, &tmp));
  /* Global size must be conserved: no particle lost or duplicated */
  PetscCall(VecGetSize(preMigrate, &preSize));
  PetscCall(VecGetSize(postMigrate, &postSize));
  PetscCheck(preSize == postSize, PETSC_COMM_WORLD, PETSC_ERR_ARG_SIZ, "Particles either lost or duplicated. Pre migrate global size %" PetscInt_FMT " != Post migrate size %" PetscInt_FMT "", preSize, postSize);
  /* The 2-norm of the coordinate vector is invariant under any permutation of
     particles across ranks, so equal norms (to rounding) means the coordinate
     data itself survived the migration intact */
  PetscCall(VecNorm(preMigrate, NORM_2, &prenorm));
  PetscCall(VecNorm(postMigrate, NORM_2, &postnorm));
  PetscCheck(PetscAbsReal(prenorm - postnorm) < 100. * PETSC_MACHINE_EPSILON, PETSC_COMM_SELF, PETSC_ERR_COR, "Particle coordinates corrupted in migrate with abs(norm(pre) - norm(post)) = %.16g", PetscAbsReal(prenorm - postnorm));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Migrate check passes.\n"));
  PetscCall(VecDestroy(&preMigrate));
  PetscCall(VecDestroy(&postMigrate));
  PetscFunctionReturn(PETSC_SUCCESS);
}
106 
107 /*
108   Checks added points persist on migrate
109 */
110 static PetscErrorCode CheckPointInsertion(DM sw)
111 {
112   PetscInt    Np_pre, Np_post;
113   PetscMPIInt rank, size;
114   MPI_Comm    comm;
115 
116   PetscFunctionBeginUser;
117   comm = PETSC_COMM_WORLD;
118   PetscCallMPI(MPI_Comm_rank(comm, &rank));
119   PetscCallMPI(MPI_Comm_size(comm, &size));
120   PetscCall(PetscPrintf(comm, "Basic point insertion check...\n"));
121   PetscCall(DMSwarmGetSize(sw, &Np_pre));
122   if (rank == 0) PetscCall(DMSwarmAddPoint(sw));
123   PetscCall(DMSwarmGetSize(sw, &Np_post));
124   PetscCheck(Np_post == (Np_pre + 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Particle insertion failed. Global size pre insertion: %" PetscInt_FMT " global size post insertion: %" PetscInt_FMT, Np_pre, Np_post);
125   PetscCall(CheckMigrate(sw));
126   PetscCall(PetscPrintf(comm, "Basic point insertion check passes.\n"));
127   PetscFunctionReturn(PETSC_SUCCESS);
128 }
129 
/*
  Checks that tie breaking works properly when a particle
  is located at a shared boundary. The higher rank should
  receive the particle while the lower rank deletes it.

  TODO: Currently only works for 2 procs.
*/
137 static PetscErrorCode CheckPointInsertion_Boundary(DM sw)
138 {
139   PetscInt    Np_loc_pre, Np_loc_post, dim;
140   PetscMPIInt rank, size;
141   PetscReal   lbox_low[3], lbox_high[3], gbox_low[3], gbox_high[3];
142   MPI_Comm    comm;
143   DM          cdm;
144 
145   PetscFunctionBeginUser;
146   comm = PETSC_COMM_WORLD;
147   PetscCallMPI(MPI_Comm_rank(comm, &rank));
148   PetscCallMPI(MPI_Comm_size(comm, &size));
149   PetscCall(PetscPrintf(comm, "Rank boundary point insertion check...\n"));
150   PetscCall(DMSwarmGetCellDM(sw, &cdm));
151   PetscCall(DMGetDimension(cdm, &dim));
152   PetscCall(DMGetBoundingBox(cdm, gbox_low, gbox_high));
153   if (rank == 0) {
154     PetscReal *coords;
155     PetscInt   adjacentdim = 0, Np;
156 
157     PetscCall(DMGetLocalBoundingBox(cdm, lbox_low, lbox_high));
158     // find a face that belongs to the neighbor.
159     for (PetscInt d = 0; d < dim; ++d) {
160       if ((gbox_high[d] - lbox_high[d]) != 0.) adjacentdim = d;
161     }
162     PetscCall(DMSwarmAddPoint(sw));
163     PetscCall(DMSwarmGetLocalSize(sw, &Np));
164     PetscCall(DMSwarmGetField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
165     for (PetscInt d = 0; d < dim; ++d) coords[(Np - 1) * dim + d] = 0.5 * (lbox_high[d] - lbox_low[d]);
166     coords[(Np - 1) * dim + adjacentdim] = lbox_high[adjacentdim];
167     PetscCall(DMSwarmRestoreField(sw, DMSwarmPICField_coor, NULL, NULL, (void **)&coords));
168   }
169   PetscCall(DMSwarmGetLocalSize(sw, &Np_loc_pre));
170   PetscCall(CheckMigrate(sw));
171   PetscCall(DMSwarmGetLocalSize(sw, &Np_loc_post));
172   if (rank == 0) PetscCheck(Np_loc_pre == (Np_loc_post + 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Migration tie breaking failed on rank %d. Particle on boundary not sent.", rank);
173   if (rank == 1) PetscCheck(Np_loc_pre == (Np_loc_post - 1), PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Migration tie breaking failed on rank %d. Particle on boundary not recieved.", rank);
174   PetscCall(PetscPrintf(comm, "Rank boundary point insertion check passes.\n"));
175   PetscFunctionReturn(PETSC_SUCCESS);
176 }
177 
int main(int argc, char **argv)
{
  DM       dm, sw;
  MPI_Comm comm;
  AppCtx   user;

  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  comm = PETSC_COMM_WORLD;
  PetscCall(ProcessOptions(comm, &user));
  /* Build the background mesh, then a PIC swarm seeded entirely on rank 0 */
  PetscCall(CreateMesh(comm, &dm, &user));
  PetscCall(CreateSwarm(dm, &sw, &user));
  /* Run the migration and insertion checks; each aborts via PetscCheck on failure */
  PetscCall(CheckMigrate(sw));
  PetscCall(CheckPointInsertion(sw));
  PetscCall(CheckPointInsertion_Boundary(sw));
  PetscCall(DMDestroy(&sw));
  PetscCall(DMDestroy(&dm));
  PetscCall(PetscFinalize());
  return 0;
}
197 
198 /*TEST
199 
200   # Swarm does not handle complex or quad
201   build:
202     requires: !complex double
203   # swarm_migrate_hash and swarm_migrate_scan test swarm migration against point location types
204   # with a distributed mesh where ranks overlap by 1. Points in the shared boundary should
205   # be sent to the process which has the highest rank that has that portion of the domain.
206   test:
207     suffix: swarm_migrate_hash
208     requires: ctetgen
209     nsize: 2
210     args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
211           -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
212           -dm_plex_hash_location true
213     filter: grep -v marker | grep -v atomic | grep -v usage
214   test:
215     suffix: swarm_migrate_scan
216     requires: ctetgen
217     nsize: 2
218     args: -dm_plex_dim 3 -dm_plex_simplex 0 -dm_distribute_overlap 1 -dm_plex_box_faces 10,10,10\
219           -dm_plex_box_lower 0.,0.,0. -dm_plex_box_upper 1.,1.,10. -dm_plex_box_bd none,none,none\
220           -dm_plex_hash_location false
221     filter: grep -v marker | grep -v atomic | grep -v usage
222 TEST*/
223