xref: /petsc/src/ts/characteristic/interface/characteristic.c (revision b122ec5aa1bd4469eb4e0673542fb7de3f411254)
1 
2 #include <petsc/private/characteristicimpl.h> /*I "petsccharacteristic.h" I*/
3 #include <petscdmda.h>
4 #include <petscviewer.h>
5 
6 PetscClassId  CHARACTERISTIC_CLASSID;
7 PetscLogEvent CHARACTERISTIC_SetUp, CHARACTERISTIC_Solve, CHARACTERISTIC_QueueSetup, CHARACTERISTIC_DAUpdate;
8 PetscLogEvent CHARACTERISTIC_HalfTimeLocal, CHARACTERISTIC_HalfTimeRemote, CHARACTERISTIC_HalfTimeExchange;
9 PetscLogEvent CHARACTERISTIC_FullTimeLocal, CHARACTERISTIC_FullTimeRemote, CHARACTERISTIC_FullTimeExchange;
10 /*
11    Contains the list of registered characteristic routines
12 */
13 PetscFunctionList CharacteristicList              = NULL;
14 PetscBool         CharacteristicRegisterAllCalled = PETSC_FALSE;
15 
16 PetscErrorCode DMDAGetNeighborsRank(DM, PetscMPIInt []);
17 PetscInt       DMDAGetNeighborRelative(DM, PetscReal, PetscReal);
18 PetscErrorCode DMDAMapToPeriodicDomain(DM, PetscScalar []);
19 
20 PetscErrorCode CharacteristicHeapSort(Characteristic, Queue, PetscInt);
21 PetscErrorCode CharacteristicSiftDown(Characteristic, Queue, PetscInt, PetscInt);
22 
23 PetscErrorCode CharacteristicView(Characteristic c, PetscViewer viewer)
24 {
25   PetscBool      iascii;
26 
27   PetscFunctionBegin;
28   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
29   if (!viewer) {
30     CHKERRQ(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)c),&viewer));
31   }
32   PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
33   PetscCheckSameComm(c, 1, viewer, 2);
34 
35   CHKERRQ(PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii));
36   if (!iascii) {
37     if (c->ops->view) {
38       CHKERRQ((*c->ops->view)(c, viewer));
39     }
40   }
41   PetscFunctionReturn(0);
42 }
43 
44 PetscErrorCode CharacteristicDestroy(Characteristic *c)
45 {
46   PetscFunctionBegin;
47   if (!*c) PetscFunctionReturn(0);
48   PetscValidHeaderSpecific(*c, CHARACTERISTIC_CLASSID, 1);
49   if (--((PetscObject)(*c))->refct > 0) PetscFunctionReturn(0);
50 
51   if ((*c)->ops->destroy) {
52     CHKERRQ((*(*c)->ops->destroy)((*c)));
53   }
54   CHKERRMPI(MPI_Type_free(&(*c)->itemType));
55   CHKERRQ(PetscFree((*c)->queue));
56   CHKERRQ(PetscFree((*c)->queueLocal));
57   CHKERRQ(PetscFree((*c)->queueRemote));
58   CHKERRQ(PetscFree((*c)->neighbors));
59   CHKERRQ(PetscFree((*c)->needCount));
60   CHKERRQ(PetscFree((*c)->localOffsets));
61   CHKERRQ(PetscFree((*c)->fillCount));
62   CHKERRQ(PetscFree((*c)->remoteOffsets));
63   CHKERRQ(PetscFree((*c)->request));
64   CHKERRQ(PetscFree((*c)->status));
65   CHKERRQ(PetscHeaderDestroy(c));
66   PetscFunctionReturn(0);
67 }
68 
69 PetscErrorCode CharacteristicCreate(MPI_Comm comm, Characteristic *c)
70 {
71   Characteristic newC;
72 
73   PetscFunctionBegin;
74   PetscValidPointer(c, 2);
75   *c = NULL;
76   CHKERRQ(CharacteristicInitializePackage());
77 
78   CHKERRQ(PetscHeaderCreate(newC, CHARACTERISTIC_CLASSID, "Characteristic", "Characteristic", "Characteristic", comm, CharacteristicDestroy, CharacteristicView));
79   *c   = newC;
80 
81   newC->structured          = PETSC_TRUE;
82   newC->numIds              = 0;
83   newC->velocityDA          = NULL;
84   newC->velocity            = NULL;
85   newC->velocityOld         = NULL;
86   newC->numVelocityComp     = 0;
87   newC->velocityComp        = NULL;
88   newC->velocityInterp      = NULL;
89   newC->velocityInterpLocal = NULL;
90   newC->velocityCtx         = NULL;
91   newC->fieldDA             = NULL;
92   newC->field               = NULL;
93   newC->numFieldComp        = 0;
94   newC->fieldComp           = NULL;
95   newC->fieldInterp         = NULL;
96   newC->fieldInterpLocal    = NULL;
97   newC->fieldCtx            = NULL;
98   newC->itemType            = 0;
99   newC->queue               = NULL;
100   newC->queueSize           = 0;
101   newC->queueMax            = 0;
102   newC->queueLocal          = NULL;
103   newC->queueLocalSize      = 0;
104   newC->queueLocalMax       = 0;
105   newC->queueRemote         = NULL;
106   newC->queueRemoteSize     = 0;
107   newC->queueRemoteMax      = 0;
108   newC->numNeighbors        = 0;
109   newC->neighbors           = NULL;
110   newC->needCount           = NULL;
111   newC->localOffsets        = NULL;
112   newC->fillCount           = NULL;
113   newC->remoteOffsets       = NULL;
114   newC->request             = NULL;
115   newC->status              = NULL;
116   PetscFunctionReturn(0);
117 }
118 
/*@C
   CharacteristicSetType - Builds Characteristic for a particular solver.

   Logically Collective on Characteristic

   Input Parameters:
+  c    - the method of characteristics context
-  type - a known method

   Options Database Key:
.  -characteristic_type <method> - Sets the method; use -help for a list
    of available methods

   Notes:
   See "include/petsccharacteristic.h" for available methods

  Normally, it is best to use the CharacteristicSetFromOptions() command and
  then set the Characteristic type from the options database rather than by using
  this routine.  Using the options database provides the user with
  maximum flexibility in evaluating the different available methods.
  The CharacteristicSetType() routine is provided for those situations where it
  is necessary to set the solver independently of the command
  line or options database.  This might be the case, for example, when
  the choice of solver changes during the execution of the
  program, and the user's application is taking responsibility for
  choosing the appropriate method.  In other words, this routine is
  not for beginners.

  Level: intermediate

.seealso: CharacteristicType

@*/
152 PetscErrorCode CharacteristicSetType(Characteristic c, CharacteristicType type)
153 {
154   PetscBool      match;
155   PetscErrorCode (*r)(Characteristic);
156 
157   PetscFunctionBegin;
158   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
159   PetscValidCharPointer(type, 2);
160 
161   CHKERRQ(PetscObjectTypeCompare((PetscObject) c, type, &match));
162   if (match) PetscFunctionReturn(0);
163 
164   if (c->data) {
165     /* destroy the old private Characteristic context */
166     CHKERRQ((*c->ops->destroy)(c));
167     c->ops->destroy = NULL;
168     c->data         = NULL;
169   }
170 
171   CHKERRQ(PetscFunctionListFind(CharacteristicList,type,&r));
172   PetscCheck(r,PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown Characteristic type given: %s", type);
173   c->setupcalled = 0;
174   CHKERRQ((*r)(c));
175   CHKERRQ(PetscObjectChangeTypeName((PetscObject) c, type));
176   PetscFunctionReturn(0);
177 }
178 
/*@
   CharacteristicSetUp - Sets up the internal data structures for the
   later use of the method of characteristics solver.

   Collective on Characteristic

   Input Parameter:
.  c - the method of characteristics context obtained from CharacteristicCreate()

   Level: developer

.seealso: CharacteristicCreate(), CharacteristicSolve(), CharacteristicDestroy()
@*/
192 PetscErrorCode CharacteristicSetUp(Characteristic c)
193 {
194   PetscFunctionBegin;
195   PetscValidHeaderSpecific(c, CHARACTERISTIC_CLASSID, 1);
196 
197   if (!((PetscObject)c)->type_name) {
198     CHKERRQ(CharacteristicSetType(c, CHARACTERISTICDA));
199   }
200 
201   if (c->setupcalled == 2) PetscFunctionReturn(0);
202 
203   CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_SetUp,c,NULL,NULL,NULL));
204   if (!c->setupcalled) {
205     CHKERRQ((*c->ops->setup)(c));
206   }
207   CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_SetUp,c,NULL,NULL,NULL));
208   c->setupcalled = 2;
209   PetscFunctionReturn(0);
210 }
211 
/*@C
  CharacteristicRegister -  Adds a solver to the method of characteristics package.

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined solver
-  routine_create - routine to create method context

  Sample usage:
.vb
    CharacteristicRegister("my_char", MyCharCreate);
.ve

  Then, your Characteristic type can be chosen with the procedural interface via
.vb
    Characteristic c;
    CharacteristicCreate(MPI_Comm, &c);
    CharacteristicSetType(c, "my_char");
.ve
   or at runtime via the option
.vb
    -characteristic_type my_char
.ve

   Notes:
   CharacteristicRegister() may be called multiple times to add several user-defined solvers.

.seealso: CharacteristicRegisterAll(), CharacteristicRegisterDestroy()

  Level: advanced
@*/
PetscErrorCode CharacteristicRegister(const char sname[],PetscErrorCode (*function)(Characteristic))
{
  PetscFunctionBegin;
  /* Ensure the package (class id, events, built-in types) is initialized
     before touching the registry */
  CHKERRQ(CharacteristicInitializePackage());
  /* Add (or overwrite) the creation routine under the given type name */
  CHKERRQ(PetscFunctionListAdd(&CharacteristicList,sname,function));
  PetscFunctionReturn(0);
}
250 
251 PetscErrorCode CharacteristicSetVelocityInterpolation(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
252 {
253   PetscFunctionBegin;
254   c->velocityDA      = da;
255   c->velocity        = v;
256   c->velocityOld     = vOld;
257   c->numVelocityComp = numComponents;
258   c->velocityComp    = components;
259   c->velocityInterp  = interp;
260   c->velocityCtx     = ctx;
261   PetscFunctionReturn(0);
262 }
263 
264 PetscErrorCode CharacteristicSetVelocityInterpolationLocal(Characteristic c, DM da, Vec v, Vec vOld, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal [], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
265 {
266   PetscFunctionBegin;
267   c->velocityDA          = da;
268   c->velocity            = v;
269   c->velocityOld         = vOld;
270   c->numVelocityComp     = numComponents;
271   c->velocityComp        = components;
272   c->velocityInterpLocal = interp;
273   c->velocityCtx         = ctx;
274   PetscFunctionReturn(0);
275 }
276 
277 PetscErrorCode CharacteristicSetFieldInterpolation(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(Vec, PetscReal[], PetscInt, PetscInt[], PetscScalar[], void*), void *ctx)
278 {
279   PetscFunctionBegin;
280 #if 0
281   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
282 #endif
283   c->fieldDA      = da;
284   c->field        = v;
285   c->numFieldComp = numComponents;
286   c->fieldComp    = components;
287   c->fieldInterp  = interp;
288   c->fieldCtx     = ctx;
289   PetscFunctionReturn(0);
290 }
291 
292 PetscErrorCode CharacteristicSetFieldInterpolationLocal(Characteristic c, DM da, Vec v, PetscInt numComponents, PetscInt components[], PetscErrorCode (*interp)(void*, PetscReal[], PetscInt, PetscInt[], PetscScalar [], void*), void *ctx)
293 {
294   PetscFunctionBegin;
295 #if 0
296   PetscCheck(numComponents <= 2,PETSC_COMM_SELF,PETSC_ERR_SUP, "Fields with more than 2 components are not supported. Send mail to petsc-maint@mcs.anl.gov.");
297 #endif
298   c->fieldDA          = da;
299   c->field            = v;
300   c->numFieldComp     = numComponents;
301   c->fieldComp        = components;
302   c->fieldInterpLocal = interp;
303   c->fieldCtx         = ctx;
304   PetscFunctionReturn(0);
305 }
306 
/*
   CharacteristicSolve - Advect the registered field backwards along the
   characteristics of the registered velocity over one time step dt, writing
   the result into the global vector 'solution'.

   The algorithm is the classic two-stage semi-Lagrangian scheme on a 2D DMDA:
     Part 1: step each grid point back dt/2, interpolate the time-averaged
             velocity there (exchanging off-process points with neighbors);
     Part 2: step the full dt using that velocity, interpolate the field at
             the foot of the characteristic (again exchanging off-process
             points), and scatter the values into 'solution'.

   NOTE(review): this routine is 2D-only — it fills interpIndices[0..1] and
   Qi.x/Qi.y only, even though 'dim' is queried from the DMDA.
*/
PetscErrorCode CharacteristicSolve(Characteristic c, PetscReal dt, Vec solution)
{
  CharacteristicPointDA2D Qi;                              /* scratch point, reused for each queue entry */
  DM                      da = c->velocityDA;
  Vec                     velocityLocal, velocityLocalOld; /* only created/used when velocityInterpLocal is set */
  Vec                     fieldLocal;                      /* only created/used when fieldInterpLocal is set */
  DMDALocalInfo           info;
  PetscScalar             **solArray;
  void                    *velocityArray;
  void                    *velocityArrayOld;
  void                    *fieldArray;
  PetscScalar             *interpIndices;                  /* (x,y) coordinates passed to the interpolators */
  PetscScalar             *velocityValues, *velocityValuesOld;
  PetscScalar             *fieldValues;
  PetscMPIInt             rank;
  PetscInt                dim;
  PetscMPIInt             neighbors[9];                    /* [self, W, NW, N, NE, E, SE, S, SW] ranks */
  PetscInt                dof;
  PetscInt                gx, gy;
  PetscInt                n, is, ie, js, je, comp;

  PetscFunctionBegin;
  c->queueSize = 0;
  CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  CHKERRQ(DMDAGetNeighborsRank(da, neighbors));
  CHKERRQ(CharacteristicSetNeighbors(c, 9, neighbors));
  CHKERRQ(CharacteristicSetUp(c));
  /* global and local grid info */
  CHKERRQ(DMDAGetInfo(da, &dim, &gx, &gy, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL));
  CHKERRQ(DMDAGetLocalInfo(da, &info));
  is   = info.xs;          ie   = info.xs+info.xm;
  js   = info.ys;          je   = info.ys+info.ym;
  /* Allocation */
  CHKERRQ(PetscMalloc1(dim,                &interpIndices));
  CHKERRQ(PetscMalloc1(c->numVelocityComp, &velocityValues));
  CHKERRQ(PetscMalloc1(c->numVelocityComp, &velocityValuesOld));
  CHKERRQ(PetscMalloc1(c->numFieldComp,    &fieldValues));
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_Solve,NULL,NULL,NULL,NULL));

  /* -----------------------------------------------------------------------
     PART 1, AT t-dt/2
     -----------------------------------------------------------------------*/
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_QueueSetup,NULL,NULL,NULL,NULL));
  /* GET POSITION AT HALF TIME IN THE PAST */
  if (c->velocityInterpLocal) {
    /* Local interpolation needs ghosted copies of both velocity vectors */
    CHKERRQ(DMGetLocalVector(c->velocityDA, &velocityLocal));
    CHKERRQ(DMGetLocalVector(c->velocityDA, &velocityLocalOld));
    CHKERRQ(DMGlobalToLocalBegin(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    CHKERRQ(DMGlobalToLocalEnd(c->velocityDA, c->velocity, INSERT_VALUES, velocityLocal));
    CHKERRQ(DMGlobalToLocalBegin(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    CHKERRQ(DMGlobalToLocalEnd(c->velocityDA, c->velocityOld, INSERT_VALUES, velocityLocalOld));
    CHKERRQ(DMDAVecGetArray(c->velocityDA, velocityLocal,    &velocityArray));
    CHKERRQ(DMDAVecGetArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
  }
  CHKERRQ(PetscInfo(NULL, "Calculating position at t_{n - 1/2}\n"));
  /* Explicit Euler half-step backwards from every owned grid point */
  for (Qi.j = js; Qi.j < je; Qi.j++) {
    for (Qi.i = is; Qi.i < ie; Qi.i++) {
      interpIndices[0] = Qi.i;
      interpIndices[1] = Qi.j;
      if (c->velocityInterpLocal) CHKERRQ(c->velocityInterpLocal(velocityArray, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      else CHKERRQ(c->velocityInterp(c->velocity, interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
      Qi.x = Qi.i - velocityValues[0]*dt/2.0;
      Qi.y = Qi.j - velocityValues[1]*dt/2.0;

      /* Determine whether the position at t - dt/2 is local */
      Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

      /* Check for Periodic boundaries and move all periodic points back onto the domain */
      CHKERRQ(DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y)));
      CHKERRQ(CharacteristicAddPoint(c, &Qi));
    }
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_QueueSetup,NULL,NULL,NULL,NULL));

  /* Ship off-process half-step points to their owning neighbors */
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));
  CHKERRQ(CharacteristicSendCoordinatesBegin(c));
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));

  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_HalfTimeLocal,NULL,NULL,NULL,NULL));
  /* Calculate velocity at t_n+1/2 (local values) */
  CHKERRQ(PetscInfo(NULL, "Calculating local velocities at t_{n - 1/2}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    if (c->neighbors[Qi.proc] == rank) {
      interpIndices[0] = Qi.x;
      interpIndices[1] = Qi.y;
      if (c->velocityInterpLocal) {
        CHKERRQ(c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        CHKERRQ(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      } else {
        CHKERRQ(c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues, c->velocityCtx));
        CHKERRQ(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
      }
      /* Qi.x/Qi.y are overwritten with the time-averaged velocity here;
         Part 2 converts them back into a position */
      Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
      Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    }
    c->queue[n] = Qi;
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_HalfTimeLocal,NULL,NULL,NULL,NULL));

  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));
  CHKERRQ(CharacteristicSendCoordinatesEnd(c));
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));

  /* Calculate velocity at t_n+1/2 (fill remote requests) */
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_HalfTimeRemote,NULL,NULL,NULL,NULL));
  /* NOTE(review): %d with a PetscInt argument assumes 32-bit indices — verify on 64-bit-index builds */
  CHKERRQ(PetscInfo(NULL, "Calculating %d remote velocities at t_{n - 1/2}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    Qi = c->queueRemote[n];
    interpIndices[0] = Qi.x;
    interpIndices[1] = Qi.y;
    if (c->velocityInterpLocal) {
      CHKERRQ(c->velocityInterpLocal(velocityArray,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx));
      CHKERRQ(c->velocityInterpLocal(velocityArrayOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    } else {
      CHKERRQ(c->velocityInterp(c->velocity,    interpIndices, c->numVelocityComp, c->velocityComp, velocityValues,    c->velocityCtx));
      CHKERRQ(c->velocityInterp(c->velocityOld, interpIndices, c->numVelocityComp, c->velocityComp, velocityValuesOld, c->velocityCtx));
    }
    Qi.x = 0.5*(velocityValues[0] + velocityValuesOld[0]);
    Qi.y = 0.5*(velocityValues[1] + velocityValuesOld[1]);
    c->queueRemote[n] = Qi;
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_HalfTimeRemote,NULL,NULL,NULL,NULL));
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));
  /* Collect the velocities computed remotely back into our queue */
  CHKERRQ(CharacteristicGetValuesBegin(c));
  CHKERRQ(CharacteristicGetValuesEnd(c));
  if (c->velocityInterpLocal) {
    CHKERRQ(DMDAVecRestoreArray(c->velocityDA, velocityLocal,    &velocityArray));
    CHKERRQ(DMDAVecRestoreArray(c->velocityDA, velocityLocalOld, &velocityArrayOld));
    CHKERRQ(DMRestoreLocalVector(c->velocityDA, &velocityLocal));
    CHKERRQ(DMRestoreLocalVector(c->velocityDA, &velocityLocalOld));
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_HalfTimeExchange,NULL,NULL,NULL,NULL));

  /* -----------------------------------------------------------------------
     PART 2, AT t-dt
     -----------------------------------------------------------------------*/

  /* GET POSITION AT t_n (local values) */
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));
  CHKERRQ(PetscInfo(NULL, "Calculating position at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    Qi   = c->queue[n];
    /* Qi.x/Qi.y currently hold the mid-step velocity; full-step back from (i,j) */
    Qi.x = Qi.i - Qi.x*dt;
    Qi.y = Qi.j - Qi.y*dt;

    /* Determine whether the position at t-dt is local */
    Qi.proc = DMDAGetNeighborRelative(da, Qi.x, Qi.y);

    /* Check for Periodic boundaries and move all periodic points back onto the domain */
    CHKERRQ(DMDAMapCoordsToPeriodicDomain(da,&(Qi.x),&(Qi.y)));

    c->queue[n] = Qi;
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));

  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));
  CHKERRQ(CharacteristicSendCoordinatesBegin(c));
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (LOCAL REQUESTS) */
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));
  if (c->fieldInterpLocal) {
    CHKERRQ(DMGetLocalVector(c->fieldDA, &fieldLocal));
    CHKERRQ(DMGlobalToLocalBegin(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    CHKERRQ(DMGlobalToLocalEnd(c->fieldDA, c->field, INSERT_VALUES, fieldLocal));
    CHKERRQ(DMDAVecGetArray(c->fieldDA, fieldLocal, &fieldArray));
  }
  CHKERRQ(PetscInfo(NULL, "Calculating local field at t_{n}\n"));
  for (n = 0; n < c->queueSize; n++) {
    if (c->neighbors[c->queue[n].proc] == rank) {
      interpIndices[0] = c->queue[n].x;
      interpIndices[1] = c->queue[n].y;
      if (c->fieldInterpLocal) CHKERRQ(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      else CHKERRQ(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
      for (comp = 0; comp < c->numFieldComp; comp++) c->queue[n].field[comp] = fieldValues[comp];
    }
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_FullTimeLocal,NULL,NULL,NULL,NULL));

  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));
  CHKERRQ(CharacteristicSendCoordinatesEnd(c));
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));

  /* GET VALUE AT FULL TIME IN THE PAST (REMOTE REQUESTS) */
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_FullTimeRemote,NULL,NULL,NULL,NULL));
  CHKERRQ(PetscInfo(NULL, "Calculating %d remote field points at t_{n}\n", c->queueRemoteSize));
  for (n = 0; n < c->queueRemoteSize; n++) {
    interpIndices[0] = c->queueRemote[n].x;
    interpIndices[1] = c->queueRemote[n].y;

    /* for debugging purposes */
    if (1) { /* hacked bounds test...let's do better */
      PetscScalar im = interpIndices[0]; PetscScalar jm = interpIndices[1];

      PetscCheck((im >= (PetscScalar) is - 1.) && (im <= (PetscScalar) ie) && (jm >= (PetscScalar)  js - 1.) && (jm <= (PetscScalar) je),PETSC_COMM_SELF,PETSC_ERR_LIB, "Nonlocal point: (%g,%g)", im, jm);
    }

    if (c->fieldInterpLocal) CHKERRQ(c->fieldInterpLocal(fieldArray, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    else CHKERRQ(c->fieldInterp(c->field, interpIndices, c->numFieldComp, c->fieldComp, fieldValues, c->fieldCtx));
    for (comp = 0; comp < c->numFieldComp; comp++) c->queueRemote[n].field[comp] = fieldValues[comp];
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_FullTimeRemote,NULL,NULL,NULL,NULL));

  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));
  CHKERRQ(CharacteristicGetValuesBegin(c));
  CHKERRQ(CharacteristicGetValuesEnd(c));
  if (c->fieldInterpLocal) {
    CHKERRQ(DMDAVecRestoreArray(c->fieldDA, fieldLocal, &fieldArray));
    CHKERRQ(DMRestoreLocalVector(c->fieldDA, &fieldLocal));
  }
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_FullTimeExchange,NULL,NULL,NULL,NULL));

  /* Return field of characteristics at t_n-1 */
  CHKERRQ(PetscLogEventBegin(CHARACTERISTIC_DAUpdate,NULL,NULL,NULL,NULL));
  CHKERRQ(DMDAGetInfo(c->fieldDA,NULL,NULL,NULL,NULL,NULL,NULL,NULL,&dof,NULL,NULL,NULL,NULL,NULL));
  CHKERRQ(DMDAVecGetArray(c->fieldDA, solution, &solArray));
  /* Scatter interpolated field values back to the owning (i,j) grid entries */
  for (n = 0; n < c->queueSize; n++) {
    Qi = c->queue[n];
    for (comp = 0; comp < c->numFieldComp; comp++) solArray[Qi.j][Qi.i*dof+c->fieldComp[comp]] = Qi.field[comp];
  }
  CHKERRQ(DMDAVecRestoreArray(c->fieldDA, solution, &solArray));
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_DAUpdate,NULL,NULL,NULL,NULL));
  CHKERRQ(PetscLogEventEnd(CHARACTERISTIC_Solve,NULL,NULL,NULL,NULL));

  /* Cleanup */
  CHKERRQ(PetscFree(interpIndices));
  CHKERRQ(PetscFree(velocityValues));
  CHKERRQ(PetscFree(velocityValuesOld));
  CHKERRQ(PetscFree(fieldValues));
  PetscFunctionReturn(0);
}
539 
PetscErrorCode CharacteristicSetNeighbors(Characteristic c, PetscInt numNeighbors, PetscMPIInt neighbors[])
{
  PetscFunctionBegin;
  /* Replace any previously stored neighbor list with a private copy;
     slot 0 is this rank, the rest are the 8 surrounding DMDA ranks */
  c->numNeighbors = numNeighbors;
  CHKERRQ(PetscFree(c->neighbors));
  CHKERRQ(PetscMalloc1(numNeighbors, &c->neighbors));
  CHKERRQ(PetscArraycpy(c->neighbors, neighbors, numNeighbors));
  PetscFunctionReturn(0);
}
549 
550 PetscErrorCode CharacteristicAddPoint(Characteristic c, CharacteristicPointDA2D *point)
551 {
552   PetscFunctionBegin;
553   PetscCheck(c->queueSize < c->queueMax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE, "Exceeded maximum queue size %d", c->queueMax);
554   c->queue[c->queueSize++] = *point;
555   PetscFunctionReturn(0);
556 }
557 
558 int CharacteristicSendCoordinatesBegin(Characteristic c)
559 {
560   PetscMPIInt    rank, tag = 121;
561   PetscInt       i, n;
562 
563   PetscFunctionBegin;
564   CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
565   CHKERRQ(CharacteristicHeapSort(c, c->queue, c->queueSize));
566   CHKERRQ(PetscArrayzero(c->needCount, c->numNeighbors));
567   for (i = 0;  i < c->queueSize; i++) c->needCount[c->queue[i].proc]++;
568   c->fillCount[0] = 0;
569   for (n = 1; n < c->numNeighbors; n++) {
570     CHKERRMPI(MPI_Irecv(&(c->fillCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1])));
571   }
572   for (n = 1; n < c->numNeighbors; n++) {
573     CHKERRMPI(MPI_Send(&(c->needCount[n]), 1, MPIU_INT, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
574   }
575   CHKERRMPI(MPI_Waitall(c->numNeighbors-1, c->request, c->status));
576   /* Initialize the remote queue */
577   c->queueLocalMax  = c->localOffsets[0]  = 0;
578   c->queueRemoteMax = c->remoteOffsets[0] = 0;
579   for (n = 1; n < c->numNeighbors; n++) {
580     c->remoteOffsets[n] = c->queueRemoteMax;
581     c->queueRemoteMax  += c->fillCount[n];
582     c->localOffsets[n]  = c->queueLocalMax;
583     c->queueLocalMax   += c->needCount[n];
584   }
585   /* HACK BEGIN */
586   for (n = 1; n < c->numNeighbors; n++) c->localOffsets[n] += c->needCount[0];
587   c->needCount[0] = 0;
588   /* HACK END */
589   if (c->queueRemoteMax) {
590     CHKERRQ(PetscMalloc1(c->queueRemoteMax, &c->queueRemote));
591   } else c->queueRemote = NULL;
592   c->queueRemoteSize = c->queueRemoteMax;
593 
594   /* Send and Receive requests for values at t_n+1/2, giving the coordinates for interpolation */
595   for (n = 1; n < c->numNeighbors; n++) {
596     CHKERRQ(PetscInfo(NULL, "Receiving %d requests for values from proc %d\n", c->fillCount[n], c->neighbors[n]));
597     CHKERRMPI(MPI_Irecv(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1])));
598   }
599   for (n = 1; n < c->numNeighbors; n++) {
600     CHKERRQ(PetscInfo(NULL, "Sending %d requests for values from proc %d\n", c->needCount[n], c->neighbors[n]));
601     CHKERRMPI(MPI_Send(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
602   }
603   PetscFunctionReturn(0);
604 }
605 
/* Complete the coordinate exchange started by CharacteristicSendCoordinatesBegin():
   waits on the posted receives so queueRemote is fully populated. */
PetscErrorCode CharacteristicSendCoordinatesEnd(Characteristic c)
{
#if 0
  PetscMPIInt rank;
  PetscInt    n;
#endif

  PetscFunctionBegin;
  CHKERRMPI(MPI_Waitall(c->numNeighbors-1, c->request, c->status));
#if 0
  /* Debugging check: no received request should have originated on this rank */
  CHKERRMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)c), &rank));
  for (n = 0; n < c->queueRemoteSize; n++) {
    PetscCheck(c->neighbors[c->queueRemote[n].proc] != rank,PETSC_COMM_SELF,PETSC_ERR_PLIB, "This is messed up, n = %d proc = %d", n, c->queueRemote[n].proc);
  }
#endif
  PetscFunctionReturn(0);
}
623 
/* Begin returning filled interpolation requests: each rank sends back the
   values it computed in queueRemote and posts receives for its own points.
   Completed by CharacteristicGetValuesEnd(). */
PetscErrorCode CharacteristicGetValuesBegin(Characteristic c)
{
  PetscMPIInt    tag = 121;
  PetscInt       n;

  PetscFunctionBegin;
  /* SEND AND RECEIVE FILLED REQUESTS for velocities at t_n+1/2 */
  /* Post receives into the same queue slots the requests were sent from */
  for (n = 1; n < c->numNeighbors; n++) {
    CHKERRMPI(MPI_Irecv(&(c->queue[c->localOffsets[n]]), c->needCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c), &(c->request[n-1])));
  }
  /* Return the values filled in for each neighbor's requests */
  for (n = 1; n < c->numNeighbors; n++) {
    CHKERRMPI(MPI_Send(&(c->queueRemote[c->remoteOffsets[n]]), c->fillCount[n], c->itemType, c->neighbors[n], tag, PetscObjectComm((PetscObject)c)));
  }
  PetscFunctionReturn(0);
}
639 
/* Complete the value exchange started by CharacteristicGetValuesBegin() and
   release the remote-request queue (it is re-allocated on the next
   CharacteristicSendCoordinatesBegin()). */
PetscErrorCode CharacteristicGetValuesEnd(Characteristic c)
{
  PetscFunctionBegin;
  CHKERRMPI(MPI_Waitall(c->numNeighbors-1, c->request, c->status));
  /* Free queue of requests from other procs */
  CHKERRQ(PetscFree(c->queueRemote));
  PetscFunctionReturn(0);
}
648 
649 /*---------------------------------------------------------------------*/
650 /*
651   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
652 */
653 PetscErrorCode CharacteristicHeapSort(Characteristic c, Queue queue, PetscInt size)
654 /*---------------------------------------------------------------------*/
655 {
656   CharacteristicPointDA2D temp;
657   PetscInt                n;
658 
659   PetscFunctionBegin;
660   if (0) { /* Check the order of the queue before sorting */
661     CHKERRQ(PetscInfo(NULL, "Before Heap sort\n"));
662     for (n=0; n<size; n++) {
663       CHKERRQ(PetscInfo(NULL,"%d %d\n",n,queue[n].proc));
664     }
665   }
666 
667   /* SORTING PHASE */
668   for (n = (size / 2)-1; n >= 0; n--) {
669     CHKERRQ(CharacteristicSiftDown(c, queue, n, size-1)); /* Rich had size-1 here, Matt had size*/
670   }
671   for (n = size-1; n >= 1; n--) {
672     temp     = queue[0];
673     queue[0] = queue[n];
674     queue[n] = temp;
675     CHKERRQ(CharacteristicSiftDown(c, queue, 0, n-1));
676   }
677   if (0) { /* Check the order of the queue after sorting */
678     CHKERRQ(PetscInfo(NULL, "Avter  Heap sort\n"));
679     for (n=0; n<size; n++) {
680       CHKERRQ(PetscInfo(NULL,"%d %d\n",n,queue[n].proc));
681     }
682   }
683   PetscFunctionReturn(0);
684 }
685 
686 /*---------------------------------------------------------------------*/
687 /*
688   Based on code from http://linux.wku.edu/~lamonml/algor/sort/heap.html
689 */
690 PetscErrorCode CharacteristicSiftDown(Characteristic c, Queue queue, PetscInt root, PetscInt bottom)
691 /*---------------------------------------------------------------------*/
692 {
693   PetscBool               done = PETSC_FALSE;
694   PetscInt                maxChild;
695   CharacteristicPointDA2D temp;
696 
697   PetscFunctionBegin;
698   while ((root*2 <= bottom) && (!done)) {
699     if (root*2 == bottom) maxChild = root * 2;
700     else if (queue[root*2].proc > queue[root*2+1].proc) maxChild = root * 2;
701     else maxChild = root * 2 + 1;
702 
703     if (queue[root].proc < queue[maxChild].proc) {
704       temp            = queue[root];
705       queue[root]     = queue[maxChild];
706       queue[maxChild] = temp;
707       root            = maxChild;
708     } else done = PETSC_TRUE;
709   }
710   PetscFunctionReturn(0);
711 }
712 
713 /* [center, left, top-left, top, top-right, right, bottom-right, bottom, bottom-left] */
714 PetscErrorCode DMDAGetNeighborsRank(DM da, PetscMPIInt neighbors[])
715 {
716   DMBoundaryType   bx, by;
717   PetscBool        IPeriodic = PETSC_FALSE, JPeriodic = PETSC_FALSE;
718   MPI_Comm         comm;
719   PetscMPIInt      rank;
720   PetscInt         **procs,pi,pj,pim,pip,pjm,pjp,PI,PJ;
721 
722   PetscFunctionBegin;
723   CHKERRQ(PetscObjectGetComm((PetscObject) da, &comm));
724   CHKERRMPI(MPI_Comm_rank(comm, &rank));
725   CHKERRQ(DMDAGetInfo(da, NULL, NULL, NULL, NULL, &PI,&PJ, NULL, NULL, NULL, &bx, &by,NULL, NULL));
726 
727   if (bx == DM_BOUNDARY_PERIODIC) IPeriodic = PETSC_TRUE;
728   if (by == DM_BOUNDARY_PERIODIC) JPeriodic = PETSC_TRUE;
729 
730   neighbors[0] = rank;
731   rank = 0;
732   CHKERRQ(PetscMalloc1(PJ,&procs));
733   for (pj=0; pj<PJ; pj++) {
734     CHKERRQ(PetscMalloc1(PI,&(procs[pj])));
735     for (pi=0; pi<PI; pi++) {
736       procs[pj][pi] = rank;
737       rank++;
738     }
739   }
740 
741   pi  = neighbors[0] % PI;
742   pj  = neighbors[0] / PI;
743   pim = pi-1;  if (pim<0) pim=PI-1;
744   pip = (pi+1)%PI;
745   pjm = pj-1;  if (pjm<0) pjm=PJ-1;
746   pjp = (pj+1)%PJ;
747 
748   neighbors[1] = procs[pj] [pim];
749   neighbors[2] = procs[pjp][pim];
750   neighbors[3] = procs[pjp][pi];
751   neighbors[4] = procs[pjp][pip];
752   neighbors[5] = procs[pj] [pip];
753   neighbors[6] = procs[pjm][pip];
754   neighbors[7] = procs[pjm][pi];
755   neighbors[8] = procs[pjm][pim];
756 
757   if (!IPeriodic) {
758     if (pi==0)    neighbors[1]=neighbors[2]=neighbors[8]=neighbors[0];
759     if (pi==PI-1) neighbors[4]=neighbors[5]=neighbors[6]=neighbors[0];
760   }
761 
762   if (!JPeriodic) {
763     if (pj==0)    neighbors[6]=neighbors[7]=neighbors[8]=neighbors[0];
764     if (pj==PJ-1) neighbors[2]=neighbors[3]=neighbors[4]=neighbors[0];
765   }
766 
767   for (pj = 0; pj < PJ; pj++) {
768     CHKERRQ(PetscFree(procs[pj]));
769   }
770   CHKERRQ(PetscFree(procs));
771   PetscFunctionReturn(0);
772 }
773 
774 /*
775   SUBDOMAIN NEIGHBORHOOD PROCESS MAP:
776     2 | 3 | 4
777     __|___|__
778     1 | 0 | 5
779     __|___|__
780     8 | 7 | 6
781       |   |
782 */
783 PetscInt DMDAGetNeighborRelative(DM da, PetscReal ir, PetscReal jr)
784 {
785   DMDALocalInfo  info;
786   PetscReal      is,ie,js,je;
787 
788   CHKERRQ(DMDAGetLocalInfo(da, &info));
789   is   = (PetscReal) info.xs - 0.5; ie = (PetscReal) info.xs + info.xm - 0.5;
790   js   = (PetscReal) info.ys - 0.5; je = (PetscReal) info.ys + info.ym - 0.5;
791 
792   if (ir >= is && ir <= ie) { /* center column */
793     if (jr >= js && jr <= je) return 0;
794     else if (jr < js)         return 7;
795     else                      return 3;
796   } else if (ir < is) {     /* left column */
797     if (jr >= js && jr <= je) return 1;
798     else if (jr < js)         return 8;
799     else                      return 2;
800   } else {                  /* right column */
801     if (jr >= js && jr <= je) return 5;
802     else if (jr < js)         return 6;
803     else                      return 4;
804   }
805 }
806