xref: /petsc/src/ts/characteristic/interface/characteristic.c (revision 750b007cd8d816cecd9de99077bb0a703b4cf61a)
1 
2 #include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
3 #include <petscdmda.h>
4 #include <petscviewer.h>
5 
/* Class id and logging events for the Characteristic package (registered elsewhere,
   presumably by CharacteristicInitializePackage() -- confirm) */
PetscClassId      CHARACTERISTIC_CLASSID;
PetscLogEvent     CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
PetscLogEvent     CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
PetscLogEvent     CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
/*
   Contains the list of registered characteristic routines
*/
PetscFunctionList CharacteristicList              = NULL;
PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;

/* Forward declarations of DMDA helpers; the first two are defined later in this
   file, DMDAMapToPeriodicDomain is not defined in this file */
PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt[]);
PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar[]);

/* Forward declarations of the queue-sorting helpers defined near the bottom of this file */
PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
22 
23 PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer) {
24   PetscBool iascii;
25 
26   PetscFunctionBegin;
27   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
28   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c), &viewer));
29   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
30   PetscCheckSameComm(c, 1, viewer, 2);
31 
32   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
33   if (!iascii) PetscTryTypeMethod(c, view, viewer);
34   PetscFunctionReturn(0);
35 }
36 
/* Destroys a Characteristic context created with CharacteristicCreate().
   Honors reference counting: storage is released only when the last
   reference is dropped. Sets *c to NULL via PetscHeaderDestroy(). */
PetscErrorCode CharacteristicDestroy(Characteristic *c) {
  PetscFunctionBegin;
  if (!*c) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
  if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(0);

  /* Let the type-specific implementation release its private data first */
  if ((*c)->ops->destroy) PetscCall((*(*c)->ops->destroy)((*c)));
  /* NOTE(review): itemType is freed unconditionally; if the object is destroyed
     before setup committed the datatype, itemType is still 0 from
     CharacteristicCreate() -- confirm MPI_Type_free is safe in that case. */
  PetscCallMPI(MPI_Type_free(&(*c)->itemType));
  /* Free all communication scratch arrays allocated during setup/solve */
  PetscCall(PetscFree((*c)->queue));
  PetscCall(PetscFree((*c)->queueLocal));
  PetscCall(PetscFree((*c)->queueRemote));
  PetscCall(PetscFree((*c)->neighbors));
  PetscCall(PetscFree((*c)->needCount));
  PetscCall(PetscFree((*c)->localOffsets));
  PetscCall(PetscFree((*c)->fillCount));
  PetscCall(PetscFree((*c)->remoteOffsets));
  PetscCall(PetscFree((*c)->request));
  PetscCall(PetscFree((*c)->status));
  PetscCall(PetscHeaderDestroy(c));
  PetscFunctionReturn(0);
}
58 
59 PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c) {
60   Characteristic newC;
61 
62   PetscFunctionBegin;
63   PetscValidPointer(c, 2);
64   *c = NULL;
65   PetscCall(CharacteristicInitializePackage());
66 
67   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
68   *c = newC;
69 
70   newC->structured          = PETSC_TRUE;
71   newC->numIds              = 0;
72   newC->velocityDA          = NULL;
73   newC->velocity            = NULL;
74   newC->velocityOld         = NULL;
75   newC->numVelocityComp     = 0;
76   newC->velocityComp        = NULL;
77   newC->velocityInterp      = NULL;
78   newC->velocityInterpLocal = NULL;
79   newC->velocityCtx         = NULL;
80   newC->fieldDA             = NULL;
81   newC->field               = NULL;
82   newC->numFieldComp        = 0;
83   newC->fieldComp           = NULL;
84   newC->fieldInterp         = NULL;
85   newC->fieldInterpLocal    = NULL;
86   newC->fieldCtx            = NULL;
87   newC->itemType            = 0;
88   newC->queue               = NULL;
89   newC->queueSize           = 0;
90   newC->queueMax            = 0;
91   newC->queueLocal          = NULL;
92   newC->queueLocalSize      = 0;
93   newC->queueLocalMax       = 0;
94   newC->queueRemote         = NULL;
95   newC->queueRemoteSize     = 0;
96   newC->queueRemoteMax      = 0;
97   newC->numNeighbors        = 0;
98   newC->neighbors           = NULL;
99   newC->needCount           = NULL;
100   newC->localOffsets        = NULL;
101   newC->fillCount           = NULL;
102   newC->remoteOffsets       = NULL;
103   newC->request             = NULL;
104   newC->status              = NULL;
105   PetscFunctionReturn(0);
106 }
107 
108 /*@C
109    CharacteristicSetType - Builds Characteristic for a particular solver.
110 
111    Logically Collective on Characteristic
112 
113    Input Parameters:
114 +  c    - the method of characteristics context
115 -  type - a known method
116 
117    Options Database Key:
118 .  -characteristic_type <method> - Sets the method; use -help for a list
119     of available methods
120 
121    Notes:
122    See "include/petsccharacteristic.h" for available methods
123 
124   Normally, it is best to use the CharacteristicSetFromOptions() command and
125   then set the Characteristic type from the options database rather than by using
126   this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the many different available methods.
128   The CharacteristicSetType() routine is provided for those situations where it
129   is necessary to set the iterative solver independently of the command
130   line or options database.  This might be the case, for example, when
131   the choice of iterative solver changes during the execution of the
132   program, and the user's application is taking responsibility for
133   choosing the appropriate method.  In other words, this routine is
134   not for beginners.
135 
136   Level: intermediate
137 
138 .seealso: `CharacteristicType`
139 
140 @*/
141 PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type) {
142   PetscBool match;
143   PetscErrorCode (*r)(Characteristic);
144 
145   PetscFunctionBegin;
146   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
147   PetscValidCharPointer(type, 2);
148 
149   PetscCall(PetscObjectTypeCompare((PetscObject)c, type, &match));
150   if (match) PetscFunctionReturn(0);
151 
152   if (c->data) {
153     /* destroy the old private Characteristic context */
154     PetscUseTypeMethod(c, destroy);
155     c->ops->destroy = NULL;
156     c->data         = NULL;
157   }
158 
159   PetscCall(PetscFunctionListFind(CharacteristicList, type, &r));
160   PetscCheck(r, PETSC_COMM_SELF, PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
161   c->setupcalled = 0;
162   PetscCall((*r)(c));
163   PetscCall(PetscObjectChangeTypeName((PetscObject)c, type));
164   PetscFunctionReturn(0);
165 }
166 
167 /*@
168    CharacteristicSetUp - Sets up the internal data structures for the
169    later use of an iterative solver.
170 
171    Collective on Characteristic
172 
173    Input Parameter:
.  c   - the method of characteristics context obtained from CharacteristicCreate()
175 
176    Level: developer
177 
178 .seealso: `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
179 @*/
180 PetscErrorCode CharacteristicSetUp(Characteristic c) {
181   PetscFunctionBegin;
182   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
183 
184   if (!((PetscObject)c)->type_name) PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));
185 
186   if (c->setupcalled == 2) PetscFunctionReturn(0);
187 
188   PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
189   if (!c->setupcalled) PetscUseTypeMethod(c, setup);
190   PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp, c, NULL, NULL, NULL));
191   c->setupcalled = 2;
192   PetscFunctionReturn(0);
193 }
194 
195 /*@C
196   CharacteristicRegister -  Adds a solver to the method of characteristics package.
197 
198    Not Collective
199 
200    Input Parameters:
201 +  name_solver - name of a new user-defined solver
202 -  routine_create - routine to create method context
203 
204   Sample usage:
205 .vb
206     CharacteristicRegister("my_char", MyCharCreate);
207 .ve
208 
209   Then, your Characteristic type can be chosen with the procedural interface via
210 .vb
    CharacteristicCreate(MPI_Comm comm, Characteristic *c);
    CharacteristicSetType(c, "my_char");
213 .ve
214    or at runtime via the option
215 .vb
216     -characteristic_type my_char
217 .ve
218 
219    Notes:
220    CharacteristicRegister() may be called multiple times to add several user-defined solvers.
221 
222 .seealso: `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`
223 
224   Level: advanced
225 @*/
PetscErrorCode CharacteristicRegister(const char sname[], PetscErrorCode (*function)(Characteristic)) {
  PetscFunctionBegin;
  /* Make sure the package (and thus the registry) is initialized before adding */
  PetscCall(CharacteristicInitializePackage());
  /* Record the creation routine under the given type name */
  PetscCall(PetscFunctionListAdd(&CharacteristicList, sname, function));
  PetscFunctionReturn(0);
}
232 
233 PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx) {
234   PetscFunctionBegin;
235   c->velocityDA      = da;
236   c->velocity        = v;
237   c->velocityOld     = vOld;
238   c->numVelocityComp = numComponents;
239   c->velocityComp    = components;
240   c->velocityInterp  = interp;
241   c->velocityCtx     = ctx;
242   PetscFunctionReturn(0);
243 }
244 
245 PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx) {
246   PetscFunctionBegin;
247   c->velocityDA          = da;
248   c->velocity            = v;
249   c->velocityOld         = vOld;
250   c->numVelocityComp     = numComponents;
251   c->velocityComp        = components;
252   c->velocityInterpLocal = interp;
253   c->velocityCtx         = ctx;
254   PetscFunctionReturn(0);
255 }
256 
257 PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx) {
258   PetscFunctionBegin;
259 #if 0
260   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
261 #endif
262   c->fieldDA      = da;
263   c->field        = v;
264   c->numFieldComp = numComponents;
265   c->fieldComp    = components;
266   c->fieldInterp  = interp;
267   c->fieldCtx     = ctx;
268   PetscFunctionReturn(0);
269 }
270 
271 PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void *, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void *), void *ctx) {
272   PetscFunctionBegin;
273 #if 0
274   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
275 #endif
276   c->fieldDA          = da;
277   c->field            = v;
278   c->numFieldComp     = numComponents;
279   c->fieldComp        = components;
280   c->fieldInterpLocal = interp;
281   c->fieldCtx         = ctx;
282   PetscFunctionReturn(0);
283 }
284 
/*
  CharacteristicSolve - Advances the field one step of size dt with the method of
  characteristics on a 2D DMDA.

  Phase 1 traces each grid point back half a step using the current velocity,
  exchanges off-process points with the 8 DMDA neighbors, and evaluates the
  time-centered velocity 0.5*(v_n + v_{n-1}) at the half-time positions.
  Phase 2 uses that velocity to trace a full step back, exchanges points again,
  interpolates the field at the departure points, and writes the result into
  `solution`.

  Assumes dim == 2 (only interpIndices[0..1] and Qi.x/Qi.y are used) and that
  the velocity has at least 2 components -- TODO confirm with callers.
*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution) {
  CharacteristicPointDA2D Qi;                              /* work point: grid index (i,j), traced coords (x,y), owner, field values */
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld; /* ghosted copies used by the "local" interpolation path */
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar           **solArray;
  void                   *velocityArray;
  void                   *velocityArrayOld;
  void                   *fieldArray;
  PetscScalar            *interpIndices;                   /* (x,y) position handed to the interpolation callbacks */
  PetscScalar            *velocityValues, *velocityValuesOld;
  PetscScalar            *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];                    /* this rank plus its 8 DMDA neighbors */
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  c->queueSize = 0;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  PetscCall(DMDAGetNeighborsRank(da, neighbors));
  PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
  PetscCall(CharacteristicSetUp(c));
  /* global and local grid info */
  PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  is = info.xs;
  ie = info.xs + info.xm;
  js = info.ys;
  je = info.ys + info.ym;
  /* Allocation */
  PetscCall(PetscMalloc1(dim, &interpIndices));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  PetscCall(PetscMalloc1(c->numFieldComp, &fieldValues));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  /* The local interpolation path needs ghosted copies of both velocity vectors */
  if (c->velocityInterpLocal) {
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  /* Trace every owned grid point back dt/2 along the current velocity and queue it */
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      Qi.x = Qi.i - velocityValues[0] * dt / 2.0;
      Qi.y = Qi.j - velocityValues[1] * dt / 2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));
      PetscCall(CharacteristicAddPoint(c, &Qi));
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup, NULL, NULL, NULL, NULL));

  /* Ship the points whose half-time position lives on a neighbor */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));
  /* Calculate velocity at t_n+1/2 (local values) */
  PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* Qi.x/Qi.y now hold the time-centered VELOCITY, not a position */
      Qi.x = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* Calculate velocity at t_n+1/2 (fill remote requests) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi               = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x              = 0.5 * (velocityValues[0] + velocityValuesOld[0]);
    Qi.y              = 0.5 * (velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));
  /* Return the filled velocity values to the requesting processes */
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal, &velocityArray));
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange, NULL, NULL, NULL, NULL));

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  /* Qi.x/Qi.y currently hold the half-time velocity; convert back to a position */
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    Qi.x = Qi.i - Qi.x * dt;
    Qi.y = Qi.j - Qi.y * dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    PetscCall(DMDAMapCoordsToPeriodicDomain(da, &(Qi.x), &(Qi.y)));

    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));
  if (c->fieldInterpLocal) {
    PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
    PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0];
      PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar)is - 1.) && (im <= (PetscScalar)ie) && (jm >= (PetscScalar)js - 1.) && (jm <= (PetscScalar)je), PETSC_COMM_SELF, PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote, NULL, NULL, NULL, NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));
  /* Return the interpolated field values to the requesting processes */
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange, NULL, NULL, NULL, NULL));

  /* Return field of characteristics at t_n-1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetInfo(c->fieldDA, NULL, NULL, NULL, NULL, NULL, NULL, NULL, &dof, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  /* Scatter each point's interpolated components back to its original grid location */
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i * dof + c->fieldComp[comp]] = Qi.field[comp];
  }
  PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate, NULL, NULL, NULL, NULL));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve, NULL, NULL, NULL, NULL));

  /* Cleanup */
  PetscCall(PetscFree(interpIndices));
  PetscCall(PetscFree(velocityValues));
  PetscCall(PetscFree(velocityValuesOld));
  PetscCall(PetscFree(fieldValues));
  PetscFunctionReturn(0);
}
519 
520 PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[]) {
521   PetscFunctionBegin;
522   c->numNeighbors = numNeighbors;
523   PetscCall(PetscFree(c->neighbors));
524   PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
525   PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
526   PetscFunctionReturn(0);
527 }
528 
529 PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point) {
530   PetscFunctionBegin;
531   PetscCheck(c->queueSize < c->queueMax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
532   c->queue[c->queueSize++] = *point;
533   PetscFunctionReturn(0);
534 }
535 
536 int CharacteristicSendCoordinatesBegin(Characteristic c) {
537   PetscMPIInt rank, tag = 121;
538   PetscInt    i, n;
539 
540   PetscFunctionBegin;
541   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
542   PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
543   PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
544   for (i = 0; i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
545   c->fillCount[0] = 0;
546   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
547   for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
548   PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
549   /* Initialize the remote queue */
550   c->queueLocalMax = c->localOffsets[0] = 0;
551   c->queueRemoteMax = c->remoteOffsets[0] = 0;
552   for (n = 1; n < c->numNeighbors; n++) {
553     c->remoteOffsets[n] = c->queueRemoteMax;
554     c->queueRemoteMax += c->fillCount[n];
555     c->localOffsets[n] = c->queueLocalMax;
556     c->queueLocalMax += c->needCount[n];
557   }
558   /* HACK BEGIN */
559   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
560   c->needCount[0] = 0;
561   /* HACK END */
562   if (c->queueRemoteMax) {
563     PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
564   } else c->queueRemote = NULL;
565   c->queueRemoteSize = c->queueRemoteMax;
566 
567   /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
568   for (n = 1; n < c->numNeighbors; n++) {
569     PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
570     PetscCallMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
571   }
572   for (n = 1; n < c->numNeighbors; n++) {
573     PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
574     PetscCallMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
575   }
576   PetscFunctionReturn(0);
577 }
578 
/* Completes the point exchange started by CharacteristicSendCoordinatesBegin():
   waits for all posted receives so c->queueRemote is fully populated. */
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c) {
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
#if 0
  /* Disabled sanity check: no received point should claim to belong to this rank */
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  for (n = 0; n < c->queueRemoteSize; n++) {
    PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  PetscFunctionReturn(0);
}
595 
/* Starts returning the filled (interpolated) points: each process sends back the
   remote requests it served and posts receives for its own outstanding requests.
   Completed by CharacteristicGetValuesEnd(). */
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c) {
  PetscMPIInt tag = 121; /* same tag as the coordinate exchange; phases are separated by Waitall */
  PetscInt    n;

  PetscFunctionBegin;
  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2 */
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n - 1])));
  for (n = 1; n < c->numNeighbors; n++) PetscCallMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  PetscFunctionReturn(0);
}
606 
/* Completes the value return started by CharacteristicGetValuesBegin() and
   releases the remote request buffer allocated in CharacteristicSendCoordinatesBegin(). */
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c) {
  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors - 1, c->request, c->status));
  /* Free queue of requests from other procs */
  PetscCall(PetscFree(c->queueRemote));
  PetscFunctionReturn(0);
}
614 
615 /*---------------------------------------------------------------------*/
616 /*
617   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
618 */
619 PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
620 /*---------------------------------------------------------------------*/
621 {
622   CharacteristicPointDA2D temp;
623   PetscInt                n;
624 
625   PetscFunctionBegin;
626   if (0) { /* Check the order of the queue before sorting */
627     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
628     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
629   }
630 
631   /* SORTING PHASE */
632   for (n = (size / 2) - 1; n >= 0; n--) { PetscCall(CharacteristicSiftDown(c, queue, n, size - 1)); /* Rich had size-1 here, Matt had size*/ }
633   for (n = size - 1; n >= 1; n--) {
634     temp     = queue[0];
635     queue[0] = queue[n];
636     queue[n] = temp;
637     PetscCall(CharacteristicSiftDown(c, queue, 0, n - 1));
638   }
639   if (0) { /* Check the order of the queue after sorting */
640     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
641     for (n = 0; n < size; n++) PetscCall(PetscInfo(NULL, "%" PetscInt_FMT " %d\n", n, queue[n].proc));
642   }
643   PetscFunctionReturn(0);
644 }
645 
/*---------------------------------------------------------------------*/
/*
  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html

  Restores the max-heap property (ordered by .proc) on queue[root..bottom],
  assuming only queue[root] may violate it.  NOTE(review): the children of
  node r are taken as 2r and 2r+1 rather than the usual 0-based 2r+1/2r+2;
  this pairs with the loop bounds in CharacteristicHeapSort() -- confirm
  before changing either routine.
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  PetscFunctionBegin;
  while ((root * 2 <= bottom) && (!done)) {
    /* Pick the larger child, or the only child when it is the last element */
    if (root * 2 == bottom) maxChild = root * 2;
    else if (queue[root * 2].proc > queue[root * 2 + 1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      /* Swap the violating parent with its larger child and continue sifting below */
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE; /* heap property holds here; stop */
  }
  PetscFunctionReturn(0);
}
672 
673 /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
674 PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[]) {
675   DMBoundaryType bx, by;
676   PetscBool      IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
677   MPI_Comm       comm;
678   PetscMPIInt    rank;
679   PetscInt     **procs, pi, pj, pim, pip, pjm, pjp, PI, PJ;
680 
681   PetscFunctionBegin;
682   PetscCall(PetscObjectGetComm((PetscObject)da, &comm));
683   PetscCallMPI(MPI_Comm_rank(comm, &rank));
684   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI, &PJ, NULL, NULL, NULL, &bx, &by, NULL, NULL));
685 
686   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
687   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;
688 
689   neighbors[0] = rank;
690   rank         = 0;
691   PetscCall(PetscMalloc1(PJ, &procs));
692   for (pj = 0; pj < PJ; pj++) {
693     PetscCall(PetscMalloc1(PI, &(procs[pj])));
694     for (pi = 0; pi < PI; pi++) {
695       procs[pj][pi] = rank;
696       rank++;
697     }
698   }
699 
700   pi  = neighbors[0] % PI;
701   pj  = neighbors[0] / PI;
702   pim = pi - 1;
703   if (pim < 0) pim = PI - 1;
704   pip = (pi + 1) % PI;
705   pjm = pj - 1;
706   if (pjm < 0) pjm = PJ - 1;
707   pjp = (pj + 1) % PJ;
708 
709   neighbors[1] = procs[pj][pim];
710   neighbors[2] = procs[pjp][pim];
711   neighbors[3] = procs[pjp][pi];
712   neighbors[4] = procs[pjp][pip];
713   neighbors[5] = procs[pj][pip];
714   neighbors[6] = procs[pjm][pip];
715   neighbors[7] = procs[pjm][pi];
716   neighbors[8] = procs[pjm][pim];
717 
718   if (!IPeriodic) {
719     if (pi == 0) neighbors[1] = neighbors[2] = neighbors[8] = neighbors[0];
720     if (pi == PI - 1) neighbors[4] = neighbors[5] = neighbors[6] = neighbors[0];
721   }
722 
723   if (!JPeriodic) {
724     if (pj == 0) neighbors[6] = neighbors[7] = neighbors[8] = neighbors[0];
725     if (pj == PJ - 1) neighbors[2] = neighbors[3] = neighbors[4] = neighbors[0];
726   }
727 
728   for (pj = 0; pj < PJ; pj++) PetscCall(PetscFree(procs[pj]));
729   PetscCall(PetscFree(procs));
730   PetscFunctionReturn(0);
731 }
732 
733 /*
734   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
735     2 | 3 | 4
736     __|___|__
737     1 | 0 | 5
738     __|___|__
739     8 | 7 | 6
740       |   |
741 */
742 PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr) {
743   DMDALocalInfo info;
744   PetscReal     is, ie, js, je;
745 
746   PetscCall(DMDAGetLocalInfo(da, &info));
747   is = (PetscReal)info.xs - 0.5;
748   ie = (PetscReal)info.xs + info.xm - 0.5;
749   js = (PetscReal)info.ys - 0.5;
750   je = (PetscReal)info.ys + info.ym - 0.5;
751 
752   if (ir >= is && ir <= ie) { /* center column */
753     if (jr >= js && jr <= je) return 0;
754     else if (jr < js) return 7;
755     else return 3;
756   } else if (ir < is) { /* left column */
757     if (jr >= js && jr <= je) return 1;
758     else if (jr < js) return 8;
759     else return 2;
760   } else { /* right column */
761     if (jr >= js && jr <= je) return 5;
762     else if (jr < js) return 6;
763     else return 4;
764   }
765 }
766