xref: /petsc/src/ts/characteristic/interface/characteristic.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b)
1 
2 #include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
3 #include <petscdmda.h>
4 #include <petscviewer.h>
5 
6 PetscClassId  CHARACTERISTIC_CLASSID;
7 PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
8 PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
9 PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
10 /*
11    Contains the list of registered characteristic routines
12 */
13 PetscFunctionList CharacteristicList              = NULL;
14 PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;
15 
16 PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
17 PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
18 PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);
19 
20 PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
21 PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
22 
/*
   CharacteristicView - Views a Characteristic context with the given viewer.

   Input Parameters:
+  c      - the method of characteristics context
-  viewer - the viewer; NULL selects the default ASCII viewer on the object's communicator

   NOTE(review): only non-ASCII viewers are forwarded to the type-specific view
   routine; ASCII viewing appears to be intentionally a no-op for this class.
*/
PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
{
  PetscBool      iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  /* Fall back to stdout on the object's communicator when no viewer is supplied */
  if (!viewer) {
    PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c),&viewer));
  }
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCheckSameComm(c, 1, viewer, 2);

  PetscCall(PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii));
  /* Delegate to the implementation's view routine only for non-ASCII viewers */
  if (!iascii) {
    if (c->ops->view) PetscCall((*c->ops->view)(c, viewer));
  }
  PetscFunctionReturn(0);
}
41 
/*
   CharacteristicDestroy - Destroys a Characteristic context created with
   CharacteristicCreate(), releasing all internal queues and buffers.

   Input Parameter:
.  c - pointer to the context; set to NULL on return by PetscHeaderDestroy()

   A NULL *c is a no-op; a remaining reference only decrements the count.
*/
PetscErrorCode CharacteristicDestroy(Characteristic *c)
{
  PetscFunctionBegin;
  if (!*c) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
  if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(0);

  /* Type-specific teardown first, while the object is still intact */
  if ((*c)->ops->destroy) {
    PetscCall((*(*c)->ops->destroy)((*c)));
  }
  /* NOTE(review): itemType is initialized to 0 in CharacteristicCreate(); if the
     object is destroyed before setup ever committed the MPI datatype, this
     MPI_Type_free may be called on an invalid handle — TODO confirm */
  PetscCallMPI(MPI_Type_free(&(*c)->itemType));
  PetscCall(PetscFree((*c)->queue));
  PetscCall(PetscFree((*c)->queueLocal));
  PetscCall(PetscFree((*c)->queueRemote));
  PetscCall(PetscFree((*c)->neighbors));
  PetscCall(PetscFree((*c)->needCount));
  PetscCall(PetscFree((*c)->localOffsets));
  PetscCall(PetscFree((*c)->fillCount));
  PetscCall(PetscFree((*c)->remoteOffsets));
  PetscCall(PetscFree((*c)->request));
  PetscCall(PetscFree((*c)->status));
  PetscCall(PetscHeaderDestroy(c));
  PetscFunctionReturn(0);
}
66 
67 PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
68 {
69   Characteristic newC;
70 
71   PetscFunctionBegin;
72   PetscValidPointer(c, 2);
73   *c = NULL;
74   PetscCall(CharacteristicInitializePackage());
75 
76   PetscCall(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
77   *c   = newC;
78 
79   newC->structured          = PETSC_TRUE;
80   newC->numIds              = 0;
81   newC->velocityDA          = NULL;
82   newC->velocity            = NULL;
83   newC->velocityOld         = NULL;
84   newC->numVelocityComp     = 0;
85   newC->velocityComp        = NULL;
86   newC->velocityInterp      = NULL;
87   newC->velocityInterpLocal = NULL;
88   newC->velocityCtx         = NULL;
89   newC->fieldDA             = NULL;
90   newC->field               = NULL;
91   newC->numFieldComp        = 0;
92   newC->fieldComp           = NULL;
93   newC->fieldInterp         = NULL;
94   newC->fieldInterpLocal    = NULL;
95   newC->fieldCtx            = NULL;
96   newC->itemType            = 0;
97   newC->queue               = NULL;
98   newC->queueSize           = 0;
99   newC->queueMax            = 0;
100   newC->queueLocal          = NULL;
101   newC->queueLocalSize      = 0;
102   newC->queueLocalMax       = 0;
103   newC->queueRemote         = NULL;
104   newC->queueRemoteSize     = 0;
105   newC->queueRemoteMax      = 0;
106   newC->numNeighbors        = 0;
107   newC->neighbors           = NULL;
108   newC->needCount           = NULL;
109   newC->localOffsets        = NULL;
110   newC->fillCount           = NULL;
111   newC->remoteOffsets       = NULL;
112   newC->request             = NULL;
113   newC->status              = NULL;
114   PetscFunctionReturn(0);
115 }
116 
/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c    - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list
    of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

  Normally, it is best to use the CharacteristicSetFromOptions() command and
  then set the Characteristic type from the options database rather than by using
  this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the many different available methods.
  The CharacteristicSetType() routine is provided for those situations where it
  is necessary to set the solver independently of the command
  line or options database.  This might be the case, for example, when
  the choice of solver changes during the execution of the
  program, and the user's application is taking responsibility for
  choosing the appropriate method.  In other words, this routine is
  not for beginners.

  Level: intermediate

.seealso: `CharacteristicType`

@*/
PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
{
  PetscBool      match;
  PetscErrorCode (*r)(Characteristic);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
  PetscValidCharPointer(type, 2);

  /* Already this type: nothing to do */
  PetscCall(PetscObjectTypeCompare((PetscObject) c, type, &match));
  if (match) PetscFunctionReturn(0);

  if (c->data) {
    /* destroy the old private Characteristic context */
    PetscCall((*c->ops->destroy)(c));
    c->ops->destroy = NULL;
    c->data         = NULL;
  }

  /* Find the registered constructor for the requested type and rebuild */
  PetscCall(PetscFunctionListFind(CharacteristicList,type,&r));
  PetscCheck(r,PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
  c->setupcalled = 0; /* force CharacteristicSetUp() to run again for the new type */
  PetscCall((*r)(c));
  PetscCall(PetscObjectChangeTypeName((PetscObject) c, type));
  PetscFunctionReturn(0);
}
176 
/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of the method of characteristics solver.

   Collective on Characteristic

   Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()

   Level: developer

.seealso: `CharacteristicCreate()`, `CharacteristicSolve()`, `CharacteristicDestroy()`
@*/
PetscErrorCode CharacteristicSetUp(Characteristic c)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);

  /* Default to the DMDA-based implementation when no type has been chosen */
  if (!((PetscObject)c)->type_name) {
    PetscCall(CharacteristicSetType(c, CHARACTERISTICDA));
  }

  /* setupcalled == 2 means fully set up; 0 means setup is needed (see
     CharacteristicSetType(), which resets it when the type changes) */
  if (c->setupcalled == 2) PetscFunctionReturn(0);

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_SetUp,c,NULL,NULL,NULL));
  if (!c->setupcalled) {
    PetscCall((*c->ops->setup)(c));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_SetUp,c,NULL,NULL,NULL));
  c->setupcalled = 2;
  PetscFunctionReturn(0);
}
209 
/*@C
  CharacteristicRegister -  Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  sname - name of a new user-defined solver
-  function - routine to create method context

  Sample usage:
.vb
    CharacteristicRegister("my_char", MyCharCreate);
.ve

  Then, your Characteristic type can be chosen with the procedural interface via
.vb
    Characteristic ch;
    CharacteristicCreate(MPI_Comm, &ch);
    CharacteristicSetType(ch, "my_char");
.ve
   or at runtime via the option
.vb
    -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.seealso: `CharacteristicRegisterAll()`, `CharacteristicRegisterDestroy()`

  Level: advanced
@*/
PetscErrorCode CharacteristicRegister(const char sname[],PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionBegin;
  PetscCall(CharacteristicInitializePackage());
  PetscCall(PetscFunctionListAdd(&CharacteristicList,sname,function));
  PetscFunctionReturn(0);
}
248 
249 PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
250 {
251   PetscFunctionBegin;
252   c->velocityDA      = da;
253   c->velocity        = v;
254   c->velocityOld     = vOld;
255   c->numVelocityComp = numComponents;
256   c->velocityComp    = components;
257   c->velocityInterp  = interp;
258   c->velocityCtx     = ctx;
259   PetscFunctionReturn(0);
260 }
261 
262 PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal [], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
263 {
264   PetscFunctionBegin;
265   c->velocityDA          = da;
266   c->velocity            = v;
267   c->velocityOld         = vOld;
268   c->numVelocityComp     = numComponents;
269   c->velocityComp        = components;
270   c->velocityInterpLocal = interp;
271   c->velocityCtx         = ctx;
272   PetscFunctionReturn(0);
273 }
274 
275 PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
276 {
277   PetscFunctionBegin;
278 #if 0
279   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
280 #endif
281   c->fieldDA      = da;
282   c->field        = v;
283   c->numFieldComp = numComponents;
284   c->fieldComp    = components;
285   c->fieldInterp  = interp;
286   c->fieldCtx     = ctx;
287   PetscFunctionReturn(0);
288 }
289 
290 PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar [], void*), void *ctx)
291 {
292   PetscFunctionBegin;
293 #if 0
294   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
295 #endif
296   c->fieldDA          = da;
297   c->field            = v;
298   c->numFieldComp     = numComponents;
299   c->fieldComp        = components;
300   c->fieldInterpLocal = interp;
301   c->fieldCtx         = ctx;
302   PetscFunctionReturn(0);
303 }
304 
/*
   CharacteristicSolve - Advances the transported field one step of size dt with
   the (semi-Lagrangian) method of characteristics on a 2D DMDA.

   Input Parameters:
+  c  - the method of characteristics context (velocity/field interpolation must
        already be set via the CharacteristicSet*Interpolation* routines)
.  dt - the time step
-  solution - global vector on c->fieldDA that receives the advected field

   Algorithm: for each local grid point, (1) trace back a half step using the
   grid-point velocity, (2) evaluate the time-averaged velocity at that midpoint
   (exchanging off-process requests with neighbors), (3) trace back a full step
   with that velocity, and (4) interpolate the field at the departure point
   (again exchanging off-process requests) into the solution vector.

   NOTE(review): the routine is hard-wired to 2D — it uses
   CharacteristicPointDA2D, a 9-entry neighbor stencil, and only
   interpIndices[0..1], even though dim is queried from the DMDA. TODO confirm
   that setup rejects dim != 2.
*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;                         /* workspace for one characteristic point */
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld;
  Vec                     fieldLocal;
  DMDALocalInfo           info;
  PetscScalar             **solArray;
  void                    *velocityArray;
  void                    *velocityArrayOld;
  void                    *fieldArray;
  PetscScalar             *interpIndices;             /* fractional grid coordinates passed to interpolators */
  PetscScalar             *velocityValues, *velocityValuesOld;
  PetscScalar             *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];               /* [center, then 8 surrounding ranks] */
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  c->queueSize = 0;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  PetscCall(DMDAGetNeighborsRank(da, neighbors));
  PetscCall(CharacteristicSetNeighbors(c, 9, neighbors));
  PetscCall(CharacteristicSetUp(c));
  /* global and local grid info */
  PetscCall(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  PetscCall(DMDAGetLocalInfo(da, &info));
  is   = info.xs;          ie   = info.xs+info.xm;
  js   = info.ys;          je   = info.ys+info.ym;
  /* Allocation */
  PetscCall(PetscMalloc1(dim,                &interpIndices));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValues));
  PetscCall(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  PetscCall(PetscMalloc1(c->numFieldComp,    &fieldValues));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_Solve,NULL,NULL,NULL,NULL));

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_QueueSetup,NULL,NULL,NULL,NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    /* Local interpolation needs ghosted local vectors for both time levels */
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    PetscCall(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocal,    &velocityArray));
    PetscCall(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) PetscCall(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else PetscCall(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      /* Euler half-step backwards along the velocity */
      Qi.x = Qi.i - velocityValues[0]*dt/2.0;
      Qi.y = Qi.j - velocityValues[1]*dt/2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      PetscCall(DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y)));
      PetscCall(CharacteristicAddPoint(c, &Qi));
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_QueueSetup,NULL,NULL,NULL,NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,NULL,NULL,NULL,NULL));
  /* Calculate velocity at t_n+1/2 (local values) */
  PetscCall(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) { /* only points that landed on this rank */
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        PetscCall(c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        PetscCall(c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* (x,y) are re-purposed to hold the time-averaged midpoint velocity */
      Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,NULL,NULL,NULL,NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));

  /* Calculate velocity at t_n+1/2 (fill remote requests) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,NULL,NULL,NULL,NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      PetscCall(c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx));
      PetscCall(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      PetscCall(c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx));
      PetscCall(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,NULL,NULL,NULL,NULL));
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocal,    &velocityArray));
    PetscCall(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    PetscCall(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));
  PetscCall(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    /* Full backwards step using the midpoint velocity stored in (x,y) */
    Qi.x = Qi.i - Qi.x*dt;
    Qi.y = Qi.j - Qi.y*dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    PetscCall(DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y)));

    c->queue[n] = Qi;
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));
  PetscCall(CharacteristicSendCoordinatesBegin(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));
  if (c->fieldInterpLocal) {
    PetscCall(DMGetLocalVector(c->fieldDA, &fieldLocal));
    PetscCall(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    PetscCall(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  PetscCall(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));
  PetscCall(CharacteristicSendCoordinatesEnd(c));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,NULL,NULL,NULL,NULL));
  PetscCall(PetscInfo(NULL, "Calculating %" PetscInt_FMT " remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar) is - 1.) && (im <= (PetscScalar) ie) && (jm >= (PetscScalar)  js - 1.) && (jm <= (PetscScalar) je),PETSC_COMM_SELF,PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", (double)PetscAbsScalar(im), (double)PetscAbsScalar(jm));
    }

    if (c->fieldInterpLocal) PetscCall(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else PetscCall(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,NULL,NULL,NULL,NULL));

  PetscCall(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));
  PetscCall(CharacteristicGetValuesBegin(c));
  PetscCall(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    PetscCall(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    PetscCall(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));

  /* Return field of characteristics at t_n-1 */
  PetscCall(PetscLogEventBegin(CHARACTERISTIC_DAUpdate,NULL,NULL,NULL,NULL));
  PetscCall(DMDAGetInfo(c->fieldDA,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&dof,NULL,NULL,NULL,NULL,NULL));
  PetscCall(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
  }
  PetscCall(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_DAUpdate,NULL,NULL,NULL,NULL));
  PetscCall(PetscLogEventEnd(CHARACTERISTIC_Solve,NULL,NULL,NULL,NULL));

  /* Cleanup */
  PetscCall(PetscFree(interpIndices));
  PetscCall(PetscFree(velocityValues));
  PetscCall(PetscFree(velocityValuesOld));
  PetscCall(PetscFree(fieldValues));
  PetscFunctionReturn(0);
}
537 
538 PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
539 {
540   PetscFunctionBegin;
541   c->numNeighbors = numNeighbors;
542   PetscCall(PetscFree(c->neighbors));
543   PetscCall(PetscMalloc1(numNeighbors, &c->neighbors));
544   PetscCall(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
545   PetscFunctionReturn(0);
546 }
547 
548 PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
549 {
550   PetscFunctionBegin;
551   PetscCheck(c->queueSize < c->queueMax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %" PetscInt_FMT, c->queueMax);
552   c->queue[c->queueSize++] = *point;
553   PetscFunctionReturn(0);
554 }
555 
556 int CharacteristicSendCoordinatesBegin(Characteristic c)
557 {
558   PetscMPIInt    rank, tag = 121;
559   PetscInt       i, n;
560 
561   PetscFunctionBegin;
562   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
563   PetscCall(CharacteristicHeapSort(c, c->queue, c->queueSize));
564   PetscCall(PetscArrayzero(c->needCount, c->numNeighbors));
565   for (i = 0;  i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
566   c->fillCount[0] = 0;
567   for (n = 1; n < c->numNeighbors; n++) {
568     PetscCallMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1])));
569   }
570   for (n = 1; n < c->numNeighbors; n++) {
571     PetscCallMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
572   }
573   PetscCallMPI(MPI_Waitall(c->numNeighbors-1, c->request, c->status));
574   /* Initialize the remote queue */
575   c->queueLocalMax  = c->localOffsets[0]  = 0;
576   c->queueRemoteMax = c->remoteOffsets[0] = 0;
577   for (n = 1; n < c->numNeighbors; n++) {
578     c->remoteOffsets[n] = c->queueRemoteMax;
579     c->queueRemoteMax  += c->fillCount[n];
580     c->localOffsets[n]  = c->queueLocalMax;
581     c->queueLocalMax   += c->needCount[n];
582   }
583   /* HACK BEGIN */
584   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
585   c->needCount[0] = 0;
586   /* HACK END */
587   if (c->queueRemoteMax) {
588     PetscCall(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
589   } else c->queueRemote = NULL;
590   c->queueRemoteSize = c->queueRemoteMax;
591 
592   /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
593   for (n = 1; n < c->numNeighbors; n++) {
594     PetscCall(PetscInfo(NULL, "Receiving %" PetscInt_FMT " requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
595     PetscCallMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1])));
596   }
597   for (n = 1; n < c->numNeighbors; n++) {
598     PetscCall(PetscInfo(NULL, "Sending %" PetscInt_FMT " requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
599     PetscCallMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
600   }
601   PetscFunctionReturn(0);
602 }
603 
/*
   CharacteristicSendCoordinatesEnd - Completes the point exchange started by
   CharacteristicSendCoordinatesBegin() by waiting on the posted receives.
   Afterwards c->queueRemote holds the points other ranks want evaluated here.
*/
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors-1, c->request, c->status));
#if 0
  /* disabled sanity check: no remote point should belong to this rank */
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  for (n = 0; n < c->queueRemoteSize; n++) {
    PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  PetscFunctionReturn(0);
}
621 
/*
   CharacteristicGetValuesBegin - Starts returning the evaluated (filled-in)
   points back to the ranks that requested them, reusing the offsets and
   counts established by CharacteristicSendCoordinatesBegin().
   Must be matched by CharacteristicGetValuesEnd().
*/
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt    tag = 121;
  PetscInt       n;

  PetscFunctionBegin;
  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2 */
  for (n = 1; n < c->numNeighbors; n++) {
    PetscCallMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1])));
  }
  for (n = 1; n < c->numNeighbors; n++) {
    PetscCallMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  }
  PetscFunctionReturn(0);
}
637 
/*
   CharacteristicGetValuesEnd - Completes the value return started by
   CharacteristicGetValuesBegin() and releases the remote request queue
   (it is re-allocated on the next CharacteristicSendCoordinatesBegin()).
*/
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  PetscFunctionBegin;
  PetscCallMPI(MPI_Waitall(c->numNeighbors-1, c->request, c->status));
  /* Free queue of requests from other procs */
  PetscCall(PetscFree(c->queueRemote));
  PetscFunctionReturn(0);
}
646 
647 /*---------------------------------------------------------------------*/
648 /*
649   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
650 */
651 PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
652 /*---------------------------------------------------------------------*/
653 {
654   CharacteristicPointDA2D temp;
655   PetscInt                n;
656 
657   PetscFunctionBegin;
658   if (0) { /* Check the order of the queue before sorting */
659     PetscCall(PetscInfo(NULL, "Before Heap sort\n"));
660     for (n=0; n<size; n++) {
661       PetscCall(PetscInfo(NULL,"%" PetscInt_FMT " %d\n",n,queue[n].proc));
662     }
663   }
664 
665   /* SORTING PHASE */
666   for (n = (size / 2)-1; n >= 0; n--) {
667     PetscCall(CharacteristicSiftDown(c, queue, n, size-1)); /* Rich had size-1 here, Matt had size*/
668   }
669   for (n = size-1; n >= 1; n--) {
670     temp     = queue[0];
671     queue[0] = queue[n];
672     queue[n] = temp;
673     PetscCall(CharacteristicSiftDown(c, queue, 0, n-1));
674   }
675   if (0) { /* Check the order of the queue after sorting */
676     PetscCall(PetscInfo(NULL, "Avter  Heap sort\n"));
677     for (n=0; n<size; n++) {
678       PetscCall(PetscInfo(NULL,"%" PetscInt_FMT " %d\n",n,queue[n].proc));
679     }
680   }
681   PetscFunctionReturn(0);
682 }
683 
/*---------------------------------------------------------------------*/
/*
  CharacteristicSiftDown - Restores the max-heap property (keyed on
  queue[].proc) for the subtree rooted at 'root', assuming both children
  already satisfy it.  'bottom' is the last valid index of the heap.

  NOTE(review): this uses the 1-based child formula (2*root, 2*root+1) on a
  0-based array, matching the original WKU reference code — the overall sort
  in CharacteristicHeapSort() depends on exactly this convention.

  Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
*/
PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
/*---------------------------------------------------------------------*/
{
  PetscBool               done = PETSC_FALSE;
  PetscInt                maxChild;
  CharacteristicPointDA2D temp;

  PetscFunctionBegin;
  while ((root*2 <= bottom) && (!done)) {
    /* Pick the larger child (or the only child at the heap boundary) */
    if (root*2 == bottom) maxChild = root * 2;
    else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
    else maxChild = root * 2 + 1;

    if (queue[root].proc < queue[maxChild].proc) {
      temp            = queue[root];
      queue[root]     = queue[maxChild];
      queue[maxChild] = temp;
      root            = maxChild;
    } else done = PETSC_TRUE;
  }
  PetscFunctionReturn(0);
}
710 
711 /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
712 PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
713 {
714   DMBoundaryType   bx, by;
715   PetscBool        IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
716   MPI_Comm         comm;
717   PetscMPIInt      rank;
718   PetscInt         **procs,pi,pj,pim,pip,pjm,pjp,PI,PJ;
719 
720   PetscFunctionBegin;
721   PetscCall(PetscObjectGetComm((PetscObject) da, &comm));
722   PetscCallMPI(MPI_Comm_rank(comm, &rank));
723   PetscCall(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI,&PJ, NULL, NULL, NULL, &bx, &by,NULL, NULL));
724 
725   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
726   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;
727 
728   neighbors[0] = rank;
729   rank = 0;
730   PetscCall(PetscMalloc1(PJ,&procs));
731   for (pj=0; pj<PJ; pj++) {
732     PetscCall(PetscMalloc1(PI,&(procs[pj])));
733     for (pi=0; pi<PI; pi++) {
734       procs[pj][pi] = rank;
735       rank++;
736     }
737   }
738 
739   pi  = neighbors[0] % PI;
740   pj  = neighbors[0] / PI;
741   pim = pi-1;  if (pim<0) pim=PI-1;
742   pip = (pi+1)%PI;
743   pjm = pj-1;  if (pjm<0) pjm=PJ-1;
744   pjp = (pj+1)%PJ;
745 
746   neighbors[1] = procs[pj] [pim];
747   neighbors[2] = procs[pjp][pim];
748   neighbors[3] = procs[pjp][pi];
749   neighbors[4] = procs[pjp][pip];
750   neighbors[5] = procs[pj] [pip];
751   neighbors[6] = procs[pjm][pip];
752   neighbors[7] = procs[pjm][pi];
753   neighbors[8] = procs[pjm][pim];
754 
755   if (!IPeriodic) {
756     if (pi==0)    neighbors[1]=neighbors[2]=neighbors[8]=neighbors[0];
757     if (pi==PI-1) neighbors[4]=neighbors[5]=neighbors[6]=neighbors[0];
758   }
759 
760   if (!JPeriodic) {
761     if (pj==0)    neighbors[6]=neighbors[7]=neighbors[8]=neighbors[0];
762     if (pj==PJ-1) neighbors[2]=neighbors[3]=neighbors[4]=neighbors[0];
763   }
764 
765   for (pj = 0; pj < PJ; pj++) {
766     PetscCall(PetscFree(procs[pj]));
767   }
768   PetscCall(PetscFree(procs));
769   PetscFunctionReturn(0);
770 }
771 
772 /*
773   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
774     2 | 3 | 4
775     __|___|__
776     1 | 0 | 5
777     __|___|__
778     8 | 7 | 6
779       |   |
780 */
781 PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
782 {
783   DMDALocalInfo  info;
784   PetscReal      is,ie,js,je;
785 
786   PetscCall(DMDAGetLocalInfo(da, &info));
787   is   = (PetscReal) info.xs - 0.5; ie = (PetscReal) info.xs + info.xm - 0.5;
788   js   = (PetscReal) info.ys - 0.5; je = (PetscReal) info.ys + info.ym - 0.5;
789 
790   if (ir >= is && ir <= ie) { /* center column */
791     if (jr >= js && jr <= je) return 0;
792     else if (jr < js)         return 7;
793     else                      return 3;
794   } else if (ir < is) {     /* left column */
795     if (jr >= js && jr <= je) return 1;
796     else if (jr < js)         return 8;
797     else                      return 2;
798   } else {                  /* right column */
799     if (jr >= js && jr <= je) return 5;
800     else if (jr < js)         return 6;
801     else                      return 4;
802   }
803 }
804