xref: /petsc/src/ts/characteristic/interface/characteristic.c (revision 862e4a309d45a165aaa4da0d704ba733429d833a)
1 
2 #include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
3 #include <petscdmda.h>
4 #include <petscviewer.h>
5 
6 PetscClassId  CHARACTERISTIC_CLASSID;
7 PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
8 PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
9 PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
10 /*
11    Contains the list of registered characteristic routines
12 */
13 PetscFunctionList CharacteristicList              = NULL;
14 PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;
15 
16 PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
17 PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
18 PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar[]);
19 
20 PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
21 PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
22 
23 PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
24 {
25   PetscBool iascii;
26 
27   PetscFunctionBegin;
28   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
29   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
30   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
31   PetscCheckSameComm(c, 1, viewer, 2);
32 
33   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
34   if (!iascii) PetscTryTypeMethod(c, view, viewer);
35   PetscFunctionReturn(0);
36 }
37 
/*
  CharacteristicDestroy - Destroys a Characteristic context created with
  CharacteristicCreate().  The object is reference counted: storage is only
  released when the last reference is dropped.
*/
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  PetscFunctionBegin;
  if (!*c) PetscFunctionReturn(0); /* destroying a NULL context is a no-op */
  PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
  /* Other references remain: just decrement and return */
  if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(0);

  /* Let the implementation free its private data before the common storage */
  if ((*c)->ops->destroy) PetscCall((*(*c)->ops->destroy)((*c)));
  PetscCallMPI(MPI_Type_free(&(*c)->itemType));
  PetscCall(PetscFree((*c)->queue));
  PetscCall(PetscFree((*c)->queueLocal));
  PetscCall(PetscFree((*c)->queueRemote));
  PetscCall(PetscFree((*c)->neighbors));
  PetscCall(PetscFree((*c)->needCount));
  PetscCall(PetscFree((*c)->localOffsets));
  PetscCall(PetscFree((*c)->fillCount));
  PetscCall(PetscFree((*c)->remoteOffsets));
  PetscCall(PetscFree((*c)->request));
  PetscCall(PetscFree((*c)->status));
  PetscCall(PetscHeaderDestroy(c)); /* frees the header and sets *c to NULL */
  PetscFunctionReturn(0);
}
60 
61 PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
62 {
63   Characteristic newC;
64 
65   PetscFunctionBegin;
66   PetscValidPointer(c, 2);
67   *c = NULL;
68   PetscCall(CharacteristicInitializePackage());
69 
70   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
71   *c = newC;
72 
73   newC->structured          = PETSC_TRUE;
74   newC->numIds              = 0;
75   newC->velocityDA          = NULL;
76   newC->velocity            = NULL;
77   newC->velocityOld         = NULL;
78   newC->numVelocityComp     = 0;
79   newC->velocityComp        = NULL;
80   newC->velocityInterp      = NULL;
81   newC->velocityInterpLocal = NULL;
82   newC->velocityCtx         = NULL;
83   newC->fieldDA             = NULL;
84   newC->field               = NULL;
85   newC->numFieldComp        = 0;
86   newC->fieldComp           = NULL;
87   newC->fieldInterp         = NULL;
88   newC->fieldInterpLocal    = NULL;
89   newC->fieldCtx            = NULL;
90   newC->itemType            = 0;
91   newC->queue               = NULL;
92   newC->queueSize           = 0;
93   newC->queueMax            = 0;
94   newC->queueLocal          = NULL;
95   newC->queueLocalSize      = 0;
96   newC->queueLocalMax       = 0;
97   newC->queueRemote         = NULL;
98   newC->queueRemoteSize     = 0;
99   newC->queueRemoteMax      = 0;
100   newC->numNeighbors        = 0;
101   newC->neighbors           = NULL;
102   newC->needCount           = NULL;
103   newC->localOffsets        = NULL;
104   newC->fillCount           = NULL;
105   newC->remoteOffsets       = NULL;
106   newC->request             = NULL;
107   newC->status              = NULL;
108   PetscFunctionReturn(0);
109 }
110 
111 /*@C
112    CharacteristicSetType - Builds Characteristic for a particular solver.
113 
114    Logically Collective on Characteristic
115 
116    Input Parameters:
117 +  c    - the method of characteristics context
118 -  type - a known method
119 
120    Options Database Key:
121 .  -characteristic_type <method> - Sets the method; use -help for a list
122     of available methods
123 
124    Notes:
125    See "include/petsccharacteristic.h" for available methods
126 
127   Normally, it is best to use the CharacteristicSetFromOptions() command and
128   then set the Characteristic type from the options database rather than by using
129   this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the many different available methods.
131   The CharacteristicSetType() routine is provided for those situations where it
132   is necessary to set the iterative solver independently of the command
133   line or options database.  This might be the case, for example, when
134   the choice of iterative solver changes during the execution of the
135   program, and the user's application is taking responsibility for
136   choosing the appropriate method.  In other words, this routine is
137   not for beginners.
138 
139   Level: intermediate
140 
141 .seealso: `CharacteristicType`
142 
143 @*/
144 PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
145 {
146   PetscBool match;
147   PetscErrorCode (*r)(Characteristic);
148 
149   PetscFunctionBegin;
150   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
151   PetscValidCharPointer(type, 2);
152 
153   PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
154   if (match) PetscFunctionReturn(0);
155 
156   if (c->data) {
157     /* destroy the old private Characteristic context */
158     PetscUseTypeMethod(c, destroy);
159     c->ops->destroy = NULL;
160     c->data         = NULL;
161   }
162 
163   PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
164   PetscCheck(r, PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
165   c->setupcalled = 0;
166   PetscCall((*r)(c));
167   PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
168   PetscFunctionReturn(0);
169 }
170 
171 /*@
172    CharacteristicSetUp - Sets up the internal data structures for the
173    later use of an iterative solver.
174 
175    Collective on Characteristic
176 
177    Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()
179 
180    Level: developer
181 
182 .seealso: `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
183 @*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);

  /* Install the default implementation if the caller never chose one */
  if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));

  /* setupcalled == 2 means setup is fully complete; nothing more to do */
  if (c->setupcalled == 2) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
  if (!c->setupcalled) PetscUseTypeMethod(c, setup);
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
  c->setupcalled = 2;
  PetscFunctionReturn(0);
}
199 
200 /*@C
201   CharacteristicRegister -  Adds a solver to the method of characteristics package.
202 
203    Not Collective
204 
205    Input Parameters:
206 +  name_solver - name of a new user-defined solver
207 -  routine_create - routine to create method context
208 
209   Sample usage:
210 .vb
211     CharacteristicRegister("my_char", MyCharCreate);
212 .ve
213 
214   Then, your Characteristic type can be chosen with the procedural interface via
215 .vb
    CharacteristicCreate(MPI_Comm comm, Characteristic *ch);
    CharacteristicSetType(ch, "my_char");
218 .ve
219    or at runtime via the option
220 .vb
221     -characteristic_type my_char
222 .ve
223 
224    Notes:
225    CharacteristicRegister() may be called multiple times to add several user-defined solvers.
226 
227 .seealso: `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
228 
229   Level: advanced
230 @*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionBegin;
  /* Ensure the package (class id, log events, registry) exists before adding */
  PetscCall(CharacteristicInitializePackage());
  PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
  PetscFunctionReturn(0);
}
238 
239 PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
240 {
241   PetscFunctionBegin;
242   c->velocityDA      = da;
243   c->velocity        = v;
244   c->velocityOld     = vOld;
245   c->numVelocityComp = numComponents;
246   c->velocityComp    = components;
247   c->velocityInterp  = interp;
248   c->velocityCtx     = ctx;
249   PetscFunctionReturn(0);
250 }
251 
252 PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
253 {
254   PetscFunctionBegin;
255   c->velocityDA          = da;
256   c->velocity            = v;
257   c->velocityOld         = vOld;
258   c->numVelocityComp     = numComponents;
259   c->velocityComp        = components;
260   c->velocityInterpLocal = interp;
261   c->velocityCtx         = ctx;
262   PetscFunctionReturn(0);
263 }
264 
265 PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
266 {
267   PetscFunctionBegin;
268 #if 0
269   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
270 #endif
271   c->fieldDA      = da;
272   c->field        = v;
273   c->numFieldComp = numComponents;
274   c->fieldComp    = components;
275   c->fieldInterp  = interp;
276   c->fieldCtx     = ctx;
277   PetscFunctionReturn(0);
278 }
279 
280 PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx)
281 {
282   PetscFunctionBegin;
283 #if 0
284   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
285 #endif
286   c->fieldDA          = da;
287   c->field            = v;
288   c->numFieldComp     = numComponents;
289   c->fieldComp        = components;
290   c->fieldInterpLocal = interp;
291   c->fieldCtx         = ctx;
292   PetscFunctionReturn(0);
293 }
294 
/*
  CharacteristicSolve - Advances the registered field one step of size dt with
  the method of characteristics on a 2D DMDA.

  Two backward traces are performed per grid point:
    Part 1: a half step (dt/2) to find the mid-trajectory point, where the
            velocity is evaluated as the average of the current and old
            velocity fields;
    Part 2: a full step (dt) using that mid-point velocity, where the field
            is interpolated and written into 'solution'.
  Points whose foot lands on a neighboring process are exchanged via the
  CharacteristicSendCoordinates*/CharacteristicGetValues* routines.

  NOTE(review): assumes a 2D decomposition (9-entry neighbor stencil) and that
  the velocity has at least 2 components (indices 0 and 1 are used).
*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  c->queueSize = 0;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  PetscCall(DMDAGetNeighborsRank(da, neighbors));
  PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
  PetscCall(CharacteristicSetUp(c));
  /* global and local grid info */
  PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation of per-point scratch buffers */
  PetscCall(PetscMalloc1(dim, &interpIndices));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    /* Local interpolation needs ghosted local vectors of both velocity fields */
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      /* Euler half-step backward along the velocity */
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));
      PetscCall(CharacteristicAddPoint(c, &Qi));
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
  /* Calculate velocity at t_{n - 1/2} (local values) */
  PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* The queue entry's (x,y) is repurposed to hold the averaged velocity */
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* Calculate velocity at t_{n - 1/2} (fill remote requests) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    /* Full backward step using the mid-trajectory velocity stored in (x,y) */
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));

    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  if (c->fieldInterpLocal) {
    PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
    PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* Return field of characteristics at t_n-1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* Cleanup */
  PetscCall(PetscFree(interpIndices));
  PetscCall(PetscFree(velocityValues));
  PetscCall(PetscFree(velocityValuesOld));
  PetscCall(PetscFree(fieldValues));
  PetscFunctionReturn(0);
}
530 
531 PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
532 {
533   PetscFunctionBegin;
534   c->numNeighbors = numNeighbors;
535   PetscCall(PetscFree(c->neighbors));
536   PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
537   PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
538   PetscFunctionReturn(0);
539 }
540 
541 PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
542 {
543   PetscFunctionBegin;
544   PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
545   c->queue[c->queueSize++] = *point;
546   PetscFunctionReturn(0);
547 }
548 
549 int CharacteristicSendCoordinatesBegin(Characteristic c)
550 {
551   PetscMPIInt rank, tag = 121;
552   PetscInt    i, n;
553 
554   PetscFunctionBegin;
555   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
556   PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
557   PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
558   for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
559   c->fillCount[0] = 0;
560   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
561   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
562   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
563   /* Initialize the remote queue */
564   c->queueLocalMax = c->localOffsets[0] = 0;
565   c->queueRemoteMax = c->remoteOffsets[0] = 0;
566   for (n = 1; n < c->numNeighbors; n++) {
567     c->remoteOffsets[n] = c->queueRemoteMax;
568     c->queueRemoteMax += c->fillCount[n];
569     c->localOffsets[n] = c->queueLocalMax;
570     c->queueLocalMax += c->needCount[n];
571   }
572   /* HACK BEGIN */
573   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
574   c->needCount[0] = 0;
575   /* HACK END */
576   if (c->queueRemoteMax) {
577     PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
578   } else c->queueRemote = NULL;
579   c->queueRemoteSize = c->queueRemoteMax;
580 
581   /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
582   for (n = 1; n < c->numNeighbors; n++) {
583     PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
584     PetscCallMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
585   }
586   for (n = 1; n < c->numNeighbors; n++) {
587     PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
588     PetscCallMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
589   }
590   PetscFunctionReturn(0);
591 }
592 
593 PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
594 {
595 #if 0
596   PetscMPIInt rank;
597   PetscInt    n;
598 #endif
599 
600   PetscFunctionBegin;
601   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
602 #if 0
603   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
604   for (n = 0; n < c->queueRemoteSize; n++) {
605     PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
606   }
607 #endif
608   PetscFunctionReturn(0);
609 }
610 
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt tag = 121;
  PetscInt    n;

  PetscFunctionBegin;
  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2: each filled
     remote block goes back to the neighbor that requested it, landing in the
     same queue slots (localOffsets) the request was sent from.  All receives
     are posted before any blocking send to avoid deadlock. */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscFunctionReturn(0);
}
622 
/*
  CharacteristicGetValuesEnd - Completes the receives posted in
  CharacteristicGetValuesBegin() and releases the remote queue.
*/
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Free queue of requests from other procs (PetscFree also NULLs the pointer) */
  PetscCall(PetscFree(c->queueRemote));
  PetscFunctionReturn(0);
}
631 
632 /*---------------------------------------------------------------------*/
633 /*
634   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
635 */
636 PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
637 /*---------------------------------------------------------------------*/
638 {
639   CharacteristicPointDA2D temp;
640   PetscInt                n;
641 
642   PetscFunctionBegin;
643   if (0) { /* Check the order of the queue before sorting */
644     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
645     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
646   }
647 
648   /* SORTING PHASE */
649   for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ }
650   for (n = size - 1; n >= 1; n--) {
651     temp     = queue[0];
652     queue[0] = queue[n];
653     queue[n] = temp;
654     PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1));
655   }
656   if (0) { /* Check the order of the queue after sorting */
657     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
658     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
659   }
660   PetscFunctionReturn(0);
661 }
662 
663 /*---------------------------------------------------------------------*/
664 /*
665   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
666 */
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  PetscFunctionBegin;
  /* Restore the max-heap property (keyed on .proc) for the subtree at 'root',
     assuming everything below already satisfies it.  NOTE(review): children
     are indexed 2*root and 2*root+1 rather than the usual 0-based 2*root+1
     and 2*root+2; for root 0 the "first child" is the root itself, which is
     harmless because queue[root].proc < queue[root].proc is false.  This
     matches the lamonml reference implementation cited above. */
  while ((root * 2 <= bottom) && (!done)) {
    if (root * 2 == bottom) maxChild = root * 2; /* only one child in range */
    else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      /* Parent smaller than its largest child: swap and keep sinking */
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE; /* heap property holds here; stop */
  }
  PetscFunctionReturn(0);
}
689 
690 /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
691 PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
692 {
693   DMBoundaryType bx, by;
694   PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
695   MPI_Comm       comm;
696   PetscMPIInt    rank;
697   PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;
698 
699   PetscFunctionBegin;
700   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
701   PetscCallMPI(MPI_Comm_rank(comm, &rank));
702   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));
703 
704   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
705   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;
706 
707   neighbors[0] = rank;
708   rank         = 0;
709   PetscCall(PetscMalloc1(PJ, &procs));
710   for (pj = 0; pj < PJ; pj++) {
711     PetscCall(PetscMalloc1(PI, &(procs[pj])));
712     for (pi = 0; pi < PI; pi++) {
713       procs[pj][pi] = rank;
714       rank++;
715     }
716   }
717 
718   pi  = neighbors[0] % PI;
719   pj  = neighbors[0] / PI;
720   pim = pi - 1;
721   if (pim < 0) pim = PI - 1;
722   pip = (pi + 1) % PI;
723   pjm = pj - 1;
724   if (pjm < 0) pjm = PJ - 1;
725   pjp = (pj + 1) % PJ;
726 
727   neighbors[1] = procs[pj][pim];
728   neighbors[2] = procs[pjp][pim];
729   neighbors[3] = procs[pjp][pi];
730   neighbors[4] = procs[pjp][pip];
731   neighbors[5] = procs[pj][pip];
732   neighbors[6] = procs[pjm][pip];
733   neighbors[7] = procs[pjm][pi];
734   neighbors[8] = procs[pjm][pim];
735 
736   if (!IPeriodic) {
737     if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
738     if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
739   }
740 
741   if (!JPeriodic) {
742     if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
743     if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
744   }
745 
746   for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
747   PetscCall(PetscFree(procs));
748   PetscFunctionReturn(0);
749 }
750 
751 /*
752   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
753     2 | 3 | 4
754     __|___|__
755     1 | 0 | 5
756     __|___|__
757     8 | 7 | 6
758       |   |
759 */
760 PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
761 {
762   DMDALocalInfo info;
763   PetscReal     is, ie, js, je;
764 
765   PetscCall(DMDAGetLocalInfo(da, &info));
766   is = (PetscReal)info.xs - 0.5;
767   ie = (PetscReal)info.xs + info.xm - 0.5;
768   js = (PetscReal)info.ys - 0.5;
769   je = (PetscReal)info.ys + info.ym - 0.5;
770 
771   if (ir >= is && ir <= ie) { /* center column */
772     if (jr >= js && jr <= je) return 0;
773     else if (jr < js) return 7;
774     else return 3;
775   } else if (ir < is) { /* left column */
776     if (jr >= js && jr <= je) return 1;
777     else if (jr < js) return 8;
778     else return 2;
779   } else { /* right column */
780     if (jr >= js && jr <= je) return 5;
781     else if (jr < js) return 6;
782     else return 4;
783   }
784 }
785