xref: /petsc/src/ts/characteristic/interface/characteristic.c (revision 5d7a6ebe9dde080aedbe86be0085708de8f97bb7)
1 
2 #include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
3 #include <petscdmda.h>
4 #include <petscviewer.h>
5 
6 PetscClassId  CHARACTERISTIC_CLASSID;
7 PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
8 PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
9 PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
10 /*
11    Contains the list of registered characteristic routines
12 */
13 PetscFunctionList CharacteristicList              = NULL;
14 PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;
15 
16 static PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
17 static PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
18 
19 static PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
20 static PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
21 
22 static PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
23 {
24   PetscBool iascii;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
28   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
29   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
30   PetscCheckSameComm(c, 1, viewer, 2);
31 
32   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
33   if (!iascii) PetscTryTypeMethod(c, view, viewer);
34   PetscFunctionReturn(PETSC_SUCCESS);
35 }
36 
/* Destroys a Characteristic context and all memory it owns; respects reference
   counting, so the object is only freed when the last reference is dropped.
   The pointer is set to NULL by PetscHeaderDestroy(). */
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  PetscFunctionBegin;
  if (!*c) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
  /* Other holders of a reference keep the object alive */
  if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(PETSC_SUCCESS);

  /* Let the implementation free its private data first */
  if ((*c)->ops->destroy) PetscCall((*(*c)->ops->destroy)((*c)));
  /* NOTE(review): itemType is freed unconditionally; presumably setup always commits
     the MPI datatype before destroy can be reached — confirm for the never-setup case */
  PetscCallMPI(MPI_Type_free(&(*c)->itemType));
  PetscCall(PetscFree((*c)->queue));
  PetscCall(PetscFree((*c)->queueLocal));
  PetscCall(PetscFree((*c)->queueRemote));
  PetscCall(PetscFree((*c)->neighbors));
  PetscCall(PetscFree((*c)->needCount));
  PetscCall(PetscFree((*c)->localOffsets));
  PetscCall(PetscFree((*c)->fillCount));
  PetscCall(PetscFree((*c)->remoteOffsets));
  PetscCall(PetscFree((*c)->request));
  PetscCall(PetscFree((*c)->status));
  PetscCall(PetscHeaderDestroy(c));
  PetscFunctionReturn(PETSC_SUCCESS);
}
59 
60 PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
61 {
62   Characteristic newC;
63 
64   PetscFunctionBegin;
65   PetscAssertPointer(c, 2);
66   *c = NULL;
67   PetscCall(CharacteristicInitializePackage());
68 
69   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
70   *c = newC;
71 
72   newC->structured          = PETSC_TRUE;
73   newC->numIds              = 0;
74   newC->velocityDA          = NULL;
75   newC->velocity            = NULL;
76   newC->velocityOld         = NULL;
77   newC->numVelocityComp     = 0;
78   newC->velocityComp        = NULL;
79   newC->velocityInterp      = NULL;
80   newC->velocityInterpLocal = NULL;
81   newC->velocityCtx         = NULL;
82   newC->fieldDA             = NULL;
83   newC->field               = NULL;
84   newC->numFieldComp        = 0;
85   newC->fieldComp           = NULL;
86   newC->fieldInterp         = NULL;
87   newC->fieldInterpLocal    = NULL;
88   newC->fieldCtx            = NULL;
89   newC->itemType            = 0;
90   newC->queue               = NULL;
91   newC->queueSize           = 0;
92   newC->queueMax            = 0;
93   newC->queueLocal          = NULL;
94   newC->queueLocalSize      = 0;
95   newC->queueLocalMax       = 0;
96   newC->queueRemote         = NULL;
97   newC->queueRemoteSize     = 0;
98   newC->queueRemoteMax      = 0;
99   newC->numNeighbors        = 0;
100   newC->neighbors           = NULL;
101   newC->needCount           = NULL;
102   newC->localOffsets        = NULL;
103   newC->fillCount           = NULL;
104   newC->remoteOffsets       = NULL;
105   newC->request             = NULL;
106   newC->status              = NULL;
107   PetscFunctionReturn(PETSC_SUCCESS);
108 }
109 
110 /*@C
111   CharacteristicSetType - Builds Characteristic for a particular solver.
112 
113   Logically Collective
114 
115   Input Parameters:
116 + c    - the method of characteristics context
117 - type - a known method
118 
119   Options Database Key:
120 . -characteristic_type <method> - Sets the method; use -help for a list
121     of available methods
122 
123   Level: intermediate
124 
125   Notes:
126   See "include/petsccharacteristic.h" for available methods
127 
128   Normally, it is best to use the CharacteristicSetFromOptions() command and
129   then set the Characteristic type from the options database rather than by using
130   this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the many different available methods.
132   The CharacteristicSetType() routine is provided for those situations where it
133   is necessary to set the iterative solver independently of the command
134   line or options database.  This might be the case, for example, when
135   the choice of iterative solver changes during the execution of the
136   program, and the user's application is taking responsibility for
137   choosing the appropriate method.  In other words, this routine is
138   not for beginners.
139 
140 .seealso: [](ch_ts), `CharacteristicType`
141 @*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool match;
  PetscErrorCode (*r)(Characteristic);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  PetscAssertPointer(type, 2);

  /* Setting the same type again is a no-op */
  PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
  if (match) PetscFunctionReturn(PETSC_SUCCESS);

  if (c->data) {
    /* destroy the old private Characteristic context */
    PetscUseTypeMethod(c, destroy);
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  /* Look up the constructor registered under this type name */
  PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
  PetscCheck(r, PetscObjectComm((PetscObject)c), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  /* Force CharacteristicSetUp() to run again for the new implementation */
  c->setupcalled = 0;
  PetscCall((*r)(c));
  PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
168 
169 /*@
170   CharacteristicSetUp - Sets up the internal data structures for the
171   later use of an iterative solver.
172 
173   Collective
174 
175   Input Parameter:
176 . c - iterative context obtained from CharacteristicCreate()
177 
178   Level: developer
179 
180 .seealso: [](ch_ts), `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
181 @*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);

  /* Fall back to the default DA-based implementation if no type was chosen */
  if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));

  /* setupcalled == 2 means setup already completed; nothing to do */
  if (c->setupcalled == 2) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
  if (!c->setupcalled) PetscUseTypeMethod(c, setup);
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
  c->setupcalled = 2;
  PetscFunctionReturn(PETSC_SUCCESS);
}
197 
198 /*@C
199   CharacteristicRegister -  Adds a solver to the method of characteristics package.
200 
201   Not Collective
202 
203   Input Parameters:
204 + sname    - name of a new user-defined solver
205 - function - routine to create method context
206 
207   Level: advanced
208 
209   Example Usage:
210 .vb
211     CharacteristicRegister("my_char", MyCharCreate);
212 .ve
213 
214   Then, your Characteristic type can be chosen with the procedural interface via
215 .vb
    CharacteristicCreate(comm, &c);
    CharacteristicSetType(c, "my_char");
218 .ve
219   or at runtime via the option
220 .vb
221     -characteristic_type my_char
222 .ve
223 
224   Notes:
225   CharacteristicRegister() may be called multiple times to add several user-defined solvers.
226 
227 .seealso: [](ch_ts), `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
228 @*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionBegin;
  /* Make sure the registry exists even if called before any Characteristic is created */
  PetscCall(CharacteristicInitializePackage());
  PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}
236 
237 PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
238 {
239   PetscFunctionBegin;
240   c->velocityDA      = da;
241   c->velocity        = v;
242   c->velocityOld     = vOld;
243   c->numVelocityComp = numComponents;
244   c->velocityComp    = components;
245   c->velocityInterp  = interp;
246   c->velocityCtx     = ctx;
247   PetscFunctionReturn(PETSC_SUCCESS);
248 }
249 
250 PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
251 {
252   PetscFunctionBegin;
253   c->velocityDA          = da;
254   c->velocity            = v;
255   c->velocityOld         = vOld;
256   c->numVelocityComp     = numComponents;
257   c->velocityComp        = components;
258   c->velocityInterpLocal = interp;
259   c->velocityCtx         = ctx;
260   PetscFunctionReturn(PETSC_SUCCESS);
261 }
262 
263 PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
264 {
265   PetscFunctionBegin;
266 #if 0
267   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
268 #endif
269   c->fieldDA      = da;
270   c->field        = v;
271   c->numFieldComp = numComponents;
272   c->fieldComp    = components;
273   c->fieldInterp  = interp;
274   c->fieldCtx     = ctx;
275   PetscFunctionReturn(PETSC_SUCCESS);
276 }
277 
278 PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
279 {
280   PetscFunctionBegin;
281 #if 0
282   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
283 #endif
284   c->fieldDA          = da;
285   c->field            = v;
286   c->numFieldComp     = numComponents;
287   c->fieldComp        = components;
288   c->fieldInterpLocal = interp;
289   c->fieldCtx         = ctx;
290   PetscFunctionReturn(PETSC_SUCCESS);
291 }
292 
/*
  CharacteristicSolve - Performs one semi-Lagrangian update: traces each grid point
  back along the velocity field by dt (two-stage: half step to find the midpoint
  velocity, then a full step), interpolates the field at the departure points, and
  writes the result into `solution`.

  Points whose departure location falls on a neighboring process are shipped there
  for interpolation and the values are returned (the Send/Get exchange pairs below).

  NOTE(review): only indices [0] and [1] of interpIndices/velocityValues are ever
  used, so this routine assumes a 2D DMDA with at least 2 velocity components —
  confirm before generalizing.
*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  c->queueSize = 0;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  /* Establish the 9-point (self + 8 surrounding) neighbor ranks for this process */
  PetscCall(DMDAGetNeighborsRank(da, neighbors));
  PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
  PetscCall(CharacteristicSetUp(c));
  /* global and local grid info */
  PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation */
  PetscCall(PetscMalloc1(dim, &interpIndices));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  /* Local interpolation requires ghosted copies of both velocity vectors */
  if (c->velocityInterpLocal) {
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  /* Euler half-step backward from every owned grid point */
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));
      PetscCall(CharacteristicAddPoint(c, &Qi));
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

  /* Ship off-process midpoints to their owning neighbors */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
  /* Calculate velocity at t_n+1/2 (local values) */
  PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* Qi.x/Qi.y are reused to store the time-averaged midpoint velocity */
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* Calculate velocity at t_n+1/2 (fill remote requests) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  /* Return the filled remote requests to their requesting processes */
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  /* Full backward step using the midpoint velocity stored in Qi.x/Qi.y */
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));

    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  if (c->fieldInterpLocal) {
    PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
    PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* Return field of characteristics at t_n-1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  /* Scatter interpolated values back to the owned (i,j) positions of the solution */
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* Cleanup */
  PetscCall(PetscFree(interpIndices));
  PetscCall(PetscFree(velocityValues));
  PetscCall(PetscFree(velocityValuesOld));
  PetscCall(PetscFree(fieldValues));
  PetscFunctionReturn(PETSC_SUCCESS);
}
528 
/* Records the ranks of the neighboring processes (index 0 is this process itself,
   see the neighborhood map above DMDAGetNeighborRelative()). The array is copied,
   so the caller retains ownership of `neighbors`; any previous copy is freed. */
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  PetscFunctionBegin;
  c->numNeighbors = numNeighbors;
  PetscCall(PetscFree(c->neighbors));
  PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
  PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
  PetscFunctionReturn(PETSC_SUCCESS);
}
538 
539 PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
540 {
541   PetscFunctionBegin;
542   PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
543   c->queue[c->queueSize++] = *point;
544   PetscFunctionReturn(PETSC_SUCCESS);
545 }
546 
/*
  CharacteristicSendCoordinatesBegin - Starts the exchange of departure-point
  coordinates: sorts the queue by destination neighbor, exchanges point counts,
  then posts the receives/sends of the actual points. Completed by
  CharacteristicSendCoordinatesEnd().
*/
PetscErrorCode CharacteristicSendCoordinatesBegin(Characteristic c)
{
  PetscMPIInt rank, tag = 121;
  PetscInt    i, n;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  /* Sort by owning neighbor index so each destination's points are contiguous */
  PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
  PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
  /* Count how many of our points each neighbor must evaluate for us */
  for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
  c->fillCount[0] = 0;
  /* Exchange counts; neighbor 0 is this process itself, so loops start at 1 */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Initialize the remote queue */
  /* Prefix sums: remoteOffsets partition incoming points, localOffsets outgoing */
  c->queueLocalMax = c->localOffsets[0] = 0;
  c->queueRemoteMax = c->remoteOffsets[0] = 0;
  for (n = 1; n < c->numNeighbors; n++) {
    c->remoteOffsets[n] = c->queueRemoteMax;
    c->queueRemoteMax += c->fillCount[n];
    c->localOffsets[n] = c->queueLocalMax;
    c->queueLocalMax += c->needCount[n];
  }
  /* HACK BEGIN */
  /* The sorted queue starts with the needCount[0] points that stay local; shift the
     send offsets past that block so nothing is sent to ourselves */
  for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
  c->needCount[0] = 0;
  /* HACK END */
  if (c->queueRemoteMax) {
    PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
  } else c->queueRemote = NULL;
  c->queueRemoteSize = c->queueRemoteMax;

  /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
    PetscCallMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
    PetscCallMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
590 
/* Completes the point exchange begun in CharacteristicSendCoordinatesBegin() by
   waiting on the posted receives; afterwards c->queueRemote holds the points this
   process must interpolate on behalf of its neighbors. */
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
#if 0
  /* Disabled sanity check: no received point should claim to belong to us */
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  for (n = 0; n < c->queueRemoteSize; n++) {
    PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  PetscFunctionReturn(PETSC_SUCCESS);
}
608 
/* Starts returning the filled remote requests: each process sends back the points it
   evaluated (queueRemote) and receives its own points, updated in place in c->queue
   at the localOffsets recorded by CharacteristicSendCoordinatesBegin(). */
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  PetscFunctionBegin;
  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2 */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscFunctionReturn(PETSC_SUCCESS);
}
620 
/* Completes the value return begun by CharacteristicGetValuesBegin() and releases
   the remote queue allocated in CharacteristicSendCoordinatesBegin(). */
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Free queue of requests from other procs */
  PetscCall(PetscFree(c->queueRemote));
  PetscFunctionReturn(PETSC_SUCCESS);
}
629 
630 /*---------------------------------------------------------------------*/
631 /*
632   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
633 */
634 static PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
635 /*---------------------------------------------------------------------*/
636 {
637   CharacteristicPointDA2D temp;
638   PetscInt                n;
639 
640   PetscFunctionBegin;
641   if (0) { /* Check the order of the queue before sorting */
642     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
643     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
644   }
645 
646   /* SORTING PHASE */
647   for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ }
648   for (n = size - 1; n >= 1; n--) {
649     temp     = queue[0];
650     queue[0] = queue[n];
651     queue[n] = temp;
652     PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1));
653   }
654   if (0) { /* Check the order of the queue after sorting */
655     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
656     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
657   }
658   PetscFunctionReturn(PETSC_SUCCESS);
659 }
660 
661 /*---------------------------------------------------------------------*/
662 /*
663   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
664 */
665 static PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
666 /*---------------------------------------------------------------------*/
667 {
668   PetscBool               done = PETSC_FALSE;
669   PetscInt                maxChild;
670   CharacteristicPointDA2D temp;
671 
672   PetscFunctionBegin;
673   while ((root * 2 <= bottom) && (!done)) {
674     if (root * 2 == bottom) maxChild = root * 2;
675     else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
676     else maxChild = root * 2 + 1;
677 
678     if (queue[root].proc < queue[maxChild].proc) {
679       temp            = queue[root];
680       queue[root]     = queue[maxChild];
681       queue[maxChild] = temp;
682       root            = maxChild;
683     } else done = PETSC_TRUE;
684   }
685   PetscFunctionReturn(PETSC_SUCCESS);
686 }
687 
688 /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
689 static PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
690 {
691   DMBoundaryType bx, by;
692   PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
693   MPI_Comm       comm;
694   PetscMPIInt    rank;
695   PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;
696 
697   PetscFunctionBegin;
698   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
699   PetscCallMPI(MPI_Comm_rank(comm, &rank));
700   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));
701 
702   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
703   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;
704 
705   neighbors[0] = rank;
706   rank         = 0;
707   PetscCall(PetscMalloc1(PJ, &procs));
708   for (pj = 0; pj < PJ; pj++) {
709     PetscCall(PetscMalloc1(PI, &(procs[pj])));
710     for (pi = 0; pi < PI; pi++) {
711       procs[pj][pi] = rank;
712       rank++;
713     }
714   }
715 
716   pi  = neighbors[0] % PI;
717   pj  = neighbors[0] / PI;
718   pim = pi - 1;
719   if (pim < 0) pim = PI - 1;
720   pip = (pi + 1) % PI;
721   pjm = pj - 1;
722   if (pjm < 0) pjm = PJ - 1;
723   pjp = (pj + 1) % PJ;
724 
725   neighbors[1] = procs[pj][pim];
726   neighbors[2] = procs[pjp][pim];
727   neighbors[3] = procs[pjp][pi];
728   neighbors[4] = procs[pjp][pip];
729   neighbors[5] = procs[pj][pip];
730   neighbors[6] = procs[pjm][pip];
731   neighbors[7] = procs[pjm][pi];
732   neighbors[8] = procs[pjm][pim];
733 
734   if (!IPeriodic) {
735     if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
736     if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
737   }
738 
739   if (!JPeriodic) {
740     if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
741     if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
742   }
743 
744   for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
745   PetscCall(PetscFree(procs));
746   PetscFunctionReturn(PETSC_SUCCESS);
747 }
748 
749 /*
750   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
751     2 | 3 | 4
752     __|___|__
753     1 | 0 | 5
754     __|___|__
755     8 | 7 | 6
756       |   |
757 */
758 static PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
759 {
760   DMDALocalInfo info;
761   PetscReal     is, ie, js, je;
762 
763   PetscCall(DMDAGetLocalInfo(da, &info));
764   is = (PetscReal)info.xs - 0.5;
765   ie = (PetscReal)info.xs + info.xm - 0.5;
766   js = (PetscReal)info.ys - 0.5;
767   je = (PetscReal)info.ys + info.ym - 0.5;
768 
769   if (ir >= is && ir <= ie) { /* center column */
770     if (jr >= js && jr <= je) return 0;
771     else if (jr < js) return 7;
772     else return 3;
773   } else if (ir < is) { /* left column */
774     if (jr >= js && jr <= je) return 1;
775     else if (jr < js) return 8;
776     else return 2;
777   } else { /* right column */
778     if (jr >= js && jr <= je) return 5;
779     else if (jr < js) return 6;
780     else return 4;
781   }
782 }
783