xref: /petsc/src/vec/is/utils/isltog.c (revision 0619917b5a674bb687c64e7daba2ab22be99af31)
1 
2 #include <petsc/private/isimpl.h> /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId          IS_LTOGM_CLASSID;
8 static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping, PetscInt *, PetscInt **, PetscInt **, PetscInt ***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
/*@C
  ISGetPointRange - Returns a description of the points in an `IS` suitable for traversal

  Not Collective

  Input Parameter:
. pointIS - The `IS` object

  Output Parameters:
+ pStart - The first index, see notes
. pEnd   - One past the last index, see notes
- points - The indices, see notes

  Level: intermediate

  Notes:
  If the `IS` contains contiguous indices in an `ISSTRIDE`, then the indices are contained in [pStart, pEnd) and points = `NULL`.
  Otherwise, `pStart = 0`, `pEnd = numIndices`, and points is an array of the indices. This supports the following pattern
.vb
  ISGetPointRange(is, &pStart, &pEnd, &points);
  for (p = pStart; p < pEnd; ++p) {
    const PetscInt point = points ? points[p] : p;
    // use point
  }
  ISRestorePointRange(is, &pStart, &pEnd, &points);
.ve
  Hence the same code can be written for `pointIS` being a `ISSTRIDE` or `ISGENERAL`

.seealso: [](sec_scatter), `IS`, `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
@*/
PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
{
  PetscInt  numCells, step = 1;
  PetscBool isStride;

  PetscFunctionBeginHot;
  *pStart = 0;
  *points = NULL;
  PetscCall(ISGetLocalSize(pointIS, &numCells));
  PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride));
  /* A stride IS may start anywhere, so query its first index; the step decides whether the range alone suffices */
  if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
  *pEnd = *pStart + numCells;
  /* Only a unit-stride IS is describable by [pStart, pEnd) alone; otherwise expose the raw index array */
  if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
  PetscFunctionReturn(PETSC_SUCCESS);
}
63 
64 /*@C
65   ISRestorePointRange - Destroys the traversal description created with `ISGetPointRange()`
66 
67   Not Collective
68 
69   Input Parameters:
70 + pointIS - The `IS` object
71 . pStart  - The first index, from `ISGetPointRange()`
72 . pEnd    - One past the last index, from `ISGetPointRange()`
73 - points  - The indices, from `ISGetPointRange()`
74 
75   Level: intermediate
76 
77 .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
78 @*/
79 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
80 {
81   PetscInt  step = 1;
82   PetscBool isStride;
83 
84   PetscFunctionBeginHot;
85   PetscCall(PetscObjectTypeCompare((PetscObject)pointIS, ISSTRIDE, &isStride));
86   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
87   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
88   PetscFunctionReturn(PETSC_SUCCESS);
89 }
90 
91 /*@C
92   ISGetPointSubrange - Configures the input `IS` to be a subrange for the traversal information given
93 
94   Not Collective
95 
96   Input Parameters:
97 + subpointIS - The `IS` object to be configured
98 . pStart     - The first index of the subrange
99 . pEnd       - One past the last index for the subrange
100 - points     - The indices for the entire range, from `ISGetPointRange()`
101 
102   Output Parameters:
103 . subpointIS - The `IS` object now configured to be a subrange
104 
105   Level: intermediate
106 
107   Note:
108   The input `IS` will now respond properly to calls to `ISGetPointRange()` and return the subrange.
109 
110 .seealso: [](sec_scatter), `IS`, `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
111 @*/
112 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
113 {
114   PetscFunctionBeginHot;
115   if (points) {
116     PetscCall(ISSetType(subpointIS, ISGENERAL));
117     PetscCall(ISGeneralSetIndices(subpointIS, pEnd - pStart, &points[pStart], PETSC_USE_POINTER));
118   } else {
119     PetscCall(ISSetType(subpointIS, ISSTRIDE));
120     PetscCall(ISStrideSetStride(subpointIS, pEnd - pStart, pStart, 1));
121   }
122   PetscFunctionReturn(PETSC_SUCCESS);
123 }
124 
125 /* -----------------------------------------------------------------------------------------*/
126 
/*
    Creates the global mapping information in the ISLocalToGlobalMapping structure

    If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt i, *idx = mapping->indices, n = mapping->n, end, start;

  PetscFunctionBegin;
  /* Already set up: the type-specific data exists */
  if (mapping->data) PetscFunctionReturn(PETSC_SUCCESS);
  end   = 0;
  start = PETSC_MAX_INT;

  /* Compute the [start, end] range spanned by the nonnegative global indices; negative entries mean "no global value" */
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end) end = idx[i];
  }
  /* No valid indices at all: normalize to the empty range [0, -1] */
  if (start > end) {
    start = 0;
    end   = -1;
  }
  mapping->globalstart = start;
  mapping->globalend   = end;
  /* Default type selection: a dense table (BASIC) when the covered range is modest,
     a hash table (HASH) when the range is much larger than the number of indices */
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4 * n, 1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping, ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  PetscTryTypeMethod(mapping, globaltolocalmappingsetup);
  PetscFunctionReturn(PETSC_SUCCESS);
}
162 
/* Build the dense lookup table used by the BASIC global-to-local implementation:
   globals[g - start] holds the local (block) index of global index g, or -1 if absent */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscInt                      i, *idx = mapping->indices, n = mapping->n, end, start, *globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start = mapping->globalstart;
  end   = mapping->globalend;
  PetscCall(PetscNew(&map));
  /* end - start + 2 (not + 1) keeps the allocation nonzero for the empty range start = 0, end = -1 */
  PetscCall(PetscMalloc1(end - start + 2, &globals));
  map->globals = globals;
  /* -1 marks global indices with no local counterpart */
  for (i = 0; i < end - start + 1; i++) globals[i] = -1;
  for (i = 0; i < n; i++) {
    if (idx[i] < 0) continue; /* negative local entries have no global value */
    globals[idx[i] - start] = i;
  }
  mapping->data = (void *)map;
  PetscFunctionReturn(PETSC_SUCCESS);
}
182 
183 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
184 {
185   PetscInt                     i, *idx = mapping->indices, n = mapping->n;
186   ISLocalToGlobalMapping_Hash *map;
187 
188   PetscFunctionBegin;
189   PetscCall(PetscNew(&map));
190   PetscCall(PetscHMapICreate(&map->globalht));
191   for (i = 0; i < n; i++) {
192     if (idx[i] < 0) continue;
193     PetscCall(PetscHMapISet(map->globalht, idx[i], i));
194   }
195   mapping->data = (void *)map;
196   PetscFunctionReturn(PETSC_SUCCESS);
197 }
198 
199 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
200 {
201   ISLocalToGlobalMapping_Basic *map = (ISLocalToGlobalMapping_Basic *)mapping->data;
202 
203   PetscFunctionBegin;
204   if (!map) PetscFunctionReturn(PETSC_SUCCESS);
205   PetscCall(PetscFree(map->globals));
206   PetscCall(PetscFree(mapping->data));
207   PetscFunctionReturn(PETSC_SUCCESS);
208 }
209 
210 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
211 {
212   ISLocalToGlobalMapping_Hash *map = (ISLocalToGlobalMapping_Hash *)mapping->data;
213 
214   PetscFunctionBegin;
215   if (!map) PetscFunctionReturn(PETSC_SUCCESS);
216   PetscCall(PetscHMapIDestroy(&map->globalht));
217   PetscCall(PetscFree(mapping->data));
218   PetscFunctionReturn(PETSC_SUCCESS);
219 }
220 
/* The four global-to-local kernels below are generated from the shared template in isltog.h.
   Each instantiation defines
     GTOLTYPE - suffix of the data struct (ISLocalToGlobalMapping##GTOLTYPE) the kernel reads
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size the generated function operates with (1 for the Block_* variants)
     GTOL     - the core lookup: map global index g to local index `local` (< 0 if absent)
   Each group redefines all four macros, so the template presumably #undef's them after
   use -- verify in isltog.h. */

/* Dense-table lookup, point-wise indices: look up the block, then scale by bs */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    local = map->globals[g / bs - start]; \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Dense-table lookup on whole blocks: no block-size scaling */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    local = map->globals[g - start]; \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash-table lookup, point-wise indices; the PetscHMapIGet return value is deliberately
   ignored -- a missing key leaves `local` at its prior value (assumed pre-set by the
   template; confirm in isltog.h) */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS   mapping->bs
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g / bs, &local); \
    if (local >= 0) local = bs * local + (g % bs); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash-table lookup on whole blocks */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS   1
#define GTOL(g, local) \
  do { \
    (void)PetscHMapIGet(map->globalht, g, &local); \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
259 
/*@
  ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

  Not Collective

  Input Parameter:
. ltog - local to global mapping

  Output Parameter:
. nltog - the duplicated local to global mapping

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog, ISLocalToGlobalMapping *nltog)
{
  ISLocalToGlobalMappingType l2gtype;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  /* Deep-copy the indices (PETSC_COPY_VALUES) so the duplicate owns its own array,
     then carry over the implementation type (basic/hash) */
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog), ltog->bs, ltog->n, ltog->indices, PETSC_COPY_VALUES, nltog));
  PetscCall(ISLocalToGlobalMappingGetType(ltog, &l2gtype));
  PetscCall(ISLocalToGlobalMappingSetType(*nltog, l2gtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
286 
287 /*@
288   ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
289 
290   Not Collective
291 
292   Input Parameter:
293 . mapping - local to global mapping
294 
295   Output Parameter:
296 . n - the number of entries in the local mapping, `ISLocalToGlobalMappingGetIndices()` returns an array of this length
297 
298   Level: advanced
299 
300 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
301 @*/
302 PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping, PetscInt *n)
303 {
304   PetscFunctionBegin;
305   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
306   PetscAssertPointer(n, 2);
307   *n = mapping->bs * mapping->n;
308   PetscFunctionReturn(PETSC_SUCCESS);
309 }
310 
/*@C
  ISLocalToGlobalMappingViewFromOptions - View an `ISLocalToGlobalMapping` based on values in the options database

  Collective

  Input Parameters:
+ A    - the local to global mapping object
. obj  - Optional object that provides the options prefix used for the options database query
- name - command line option

  Level: intermediate

  Note:
  See `PetscObjectViewFromOptions()` for the available `PetscViewer` and `PetscViewerFormat`

.seealso: [](sec_scatter), `PetscViewer`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView()`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A, PetscObject obj, const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, IS_LTOGM_CLASSID, 1);
  /* Thin convenience wrapper: all viewer/format handling is in PetscObjectViewFromOptions() */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A, obj, name));
  PetscFunctionReturn(PETSC_SUCCESS);
}
335 
/*@C
  ISLocalToGlobalMappingView - View a local to global mapping

  Not Collective

  Input Parameters:
+ mapping - local to global mapping
- viewer  - viewer

  Level: advanced

.seealso: [](sec_scatter), `PetscViewer`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping, PetscViewer viewer)
{
  PetscInt    i;
  PetscMPIInt rank;
  PetscBool   iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* Default to stdout on the mapping's communicator when no viewer is supplied */
  if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping), &viewer));
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);

  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping), &rank));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  /* Only ASCII viewers are supported; anything else is silently a no-op */
  if (iascii) {
    PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping, viewer));
    /* Synchronized printing so per-rank output is interleaved in rank order */
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i = 0; i < mapping->n; i++) {
      PetscInt bs = mapping->bs, g = mapping->indices[i];
      /* bs == 1 prints "local global"; bs > 1 prints the expanded entry ranges "lo:hi glo:ghi" per block */
      if (bs == 1) PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n", rank, i, g));
      else PetscCall(PetscViewerASCIISynchronizedPrintf(viewer, "[%d] %" PetscInt_FMT ":%" PetscInt_FMT " %" PetscInt_FMT ":%" PetscInt_FMT "\n", rank, i * bs, (i + 1) * bs, g * bs, (g + 1) * bs));
    }
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
375 
/*@
  ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective

  Input Parameter:
. is - index set containing the global numbers for each local number

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  the block size of the `IS` determines the block size of the mapping

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is, ISLocalToGlobalMapping *mapping)
{
  PetscInt        n, bs;
  const PetscInt *indices;
  MPI_Comm        comm;
  PetscBool       isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is, IS_CLASSID, 1);
  PetscAssertPointer(mapping, 2);

  PetscCall(PetscObjectGetComm((PetscObject)is, &comm));
  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(PetscObjectTypeCompare((PetscObject)is, ISBLOCK, &isblock));
  if (!isblock) {
    /* Non-block IS: treat every index individually (block size 1) */
    PetscCall(ISGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, n, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISRestoreIndices(is, &indices));
  } else {
    /* Block IS: use its block indices directly; n counts entries, so n/bs counts blocks */
    PetscCall(ISGetBlockSize(is, &bs));
    PetscCall(ISBlockGetIndices(is, &indices));
    PetscCall(ISLocalToGlobalMappingCreate(comm, bs, n / bs, indices, PETSC_COPY_VALUES, mapping));
    PetscCall(ISBlockRestoreIndices(is, &indices));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
421 
/*@C
  ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Collective

  Input Parameters:
+ sf    - star forest mapping contiguous local indices to (rank, offset)
- start - first global index on this process, or `PETSC_DECIDE` to compute contiguous global numbering automatically

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Note:
  If any processor calls this with `start` = `PETSC_DECIDE` then all processors must, otherwise the program will hang.

.seealso: [](sec_scatter), `PetscSF`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf, PetscInt start, ISLocalToGlobalMapping *mapping)
{
  PetscInt i, maxlocal, nroots, nleaves, *globals, *ltog;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
  PetscAssertPointer(mapping, 3);
  PetscCall(PetscObjectGetComm((PetscObject)sf, &comm));
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, NULL, NULL));
  if (start == PETSC_DECIDE) {
    /* Contiguous global numbering: start = sum of nroots on lower ranks (Exscan leaves rank 0's start at 0) */
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots, &start, 1, MPIU_INT, MPI_SUM, comm));
  } else PetscCheck(start >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  /* maxlocal becomes the local array length: largest leaf index plus one */
  PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
  ++maxlocal;
  PetscCall(PetscMalloc1(nroots, &globals));
  PetscCall(PetscMalloc1(maxlocal, &ltog));
  /* Each owned root gets a consecutive global number; leaves not covered by the SF stay -1 */
  for (i = 0; i < nroots; i++) globals[i] = start + i;
  for (i = 0; i < maxlocal; i++) ltog[i] = -1;
  /* Broadcast the root global numbers to all leaves to form the local-to-global array */
  PetscCall(PetscSFBcastBegin(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf, MPIU_INT, globals, ltog, MPI_REPLACE));
  /* The mapping takes ownership of ltog (PETSC_OWN_POINTER); globals is temporary */
  PetscCall(ISLocalToGlobalMappingCreate(comm, 1, maxlocal, ltog, PETSC_OWN_POINTER, mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(PETSC_SUCCESS);
}
468 
/*@
  ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

  Not Collective

  Input Parameters:
+ mapping - mapping data structure
- bs      - the blocksize

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
@*/
PetscErrorCode ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping, PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt        i, cn, on, obs, nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCheck(bs >= 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid block size %" PetscInt_FMT, bs);
  if (bs == mapping->bs) PetscFunctionReturn(PETSC_SUCCESS);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  /* The total number of entries on*obs must be divisible by the new block size */
  nn  = (on * obs) / bs;
  PetscCheck((on * obs) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT, bs, obs, on);

  PetscCall(PetscMalloc1(nn, &nid));
  /* Expand to entry-wise (block size 1) indices, then re-block with the new size */
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping, &oid));
  for (i = 0; i < nn; i++) {
    PetscInt j;
    /* Within each new block, entries must be consecutive global indices; cn counts negative (missing) entries */
    for (j = 0, cn = 0; j < bs - 1; j++) {
      if (oid[i * bs + j] < 0) {
        cn++;
        continue;
      }
      PetscCheck(oid[i * bs + j] == oid[i * bs + j + 1] - 1, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT, bs, obs, oid[i * bs + j], oid[i * bs + j + 1]);
    }
    /* Account for the last entry of the block (j == bs - 1 after the loop) */
    if (oid[i * bs + j] < 0) cn++;
    if (cn) {
      /* A block may be either fully valid or fully negative; a mix is an error */
      PetscCheck(cn == bs, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT, bs, obs, cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i * bs] / bs;
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping, &oid));

  /* Install the re-blocked indices */
  mapping->n  = nn;
  mapping->bs = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the shared allocation; remaining slots were allocated individually */
    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i = 1; i < mapping->info_nproc; i++) PetscCall(PetscFree(mapping->info_indices[i]));
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* Drop the type-specific global-to-local data; it is rebuilt lazily for the new block size */
  PetscTryTypeMethod(mapping, destroy);
  PetscFunctionReturn(PETSC_SUCCESS);
}
541 
542 /*@
543   ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
544   ordering and a global parallel ordering.
545 
546   Not Collective
547 
548   Input Parameter:
549 . mapping - mapping data structure
550 
551   Output Parameter:
552 . bs - the blocksize
553 
554   Level: advanced
555 
556 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
557 @*/
558 PetscErrorCode ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping, PetscInt *bs)
559 {
560   PetscFunctionBegin;
561   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
562   *bs = mapping->bs;
563   PetscFunctionReturn(PETSC_SUCCESS);
564 }
565 
/*@
  ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective, but communicator may have more than one process

  Input Parameters:
+ comm    - MPI communicator
. bs      - the block size
. n       - the number of local elements divided by the block size, or equivalently the number of block indices
. indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
- mode    - see PetscCopyMode

  Output Parameter:
. mapping - new mapping data structure

  Level: advanced

  Notes:
  There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType`
  of `ISLOCALTOGLOBALMAPPINGBASIC` will be used; this uses more memory but is faster; this approach is not scalable for extremely large mappings.

  For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option
  `-islocaltoglobalmapping_type` <`basic`,`hash`> to control which is used.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`,
          `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
          `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm comm, PetscInt bs, PetscInt n, const PetscInt indices[], PetscCopyMode mode, ISLocalToGlobalMapping *mapping)
{
  PetscInt *in;

  PetscFunctionBegin;
  if (n) PetscAssertPointer(indices, 4);
  PetscAssertPointer(mapping, 6);

  *mapping = NULL;
  PetscCall(ISInitializePackage());

  PetscCall(PetscHeaderCreate(*mapping, IS_LTOGM_CLASSID, "ISLocalToGlobalMapping", "Local to global mapping", "IS", comm, ISLocalToGlobalMappingDestroy, ISLocalToGlobalMappingView));
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  /* dealloc_indices records whether the mapping owns (and must free) the index array:
     true for COPY_VALUES (our copy) and OWN_POINTER (ownership transferred), false for USE_POINTER */
  if (mode == PETSC_COPY_VALUES) {
    PetscCall(PetscMalloc1(n, &in));
    PetscCall(PetscArraycpy(in, indices, n));
    (*mapping)->indices         = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
  } else if (mode == PETSC_OWN_POINTER) {
    (*mapping)->indices         = (PetscInt *)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
  } else if (mode == PETSC_USE_POINTER) {
    (*mapping)->indices = (PetscInt *)indices;
  } else SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid mode %d", mode);
  PetscFunctionReturn(PETSC_SUCCESS);
}
625 
626 PetscFunctionList ISLocalToGlobalMappingList = NULL;
627 
/*@
  ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

  Not Collective

  Input Parameter:
. mapping - mapping data structure

  Options Database Key:
. -islocaltoglobalmapping_type - <basic,hash> nonscalable and scalable versions

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingDestroy()`,
`ISLocalToGlobalMappingCreateIS()`, `ISLOCALTOGLOBALMAPPINGBASIC`,
`ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* Make sure the type list is populated before offering it as options */
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  PetscObjectOptionsBegin((PetscObject)mapping);
  /* Present the current type (or the "Not set" placeholder) as the default choice */
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type", "ISLocalToGlobalMapping method", "ISLocalToGlobalMappingSetType", ISLocalToGlobalMappingList, (char *)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype, type, 256, &flg));
  if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping, type));
  PetscOptionsEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
660 
/*@
  ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
  ordering and a global parallel ordering.

  Not Collective

  Input Parameter:
. mapping - mapping data structure

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(PETSC_SUCCESS);
  PetscValidHeaderSpecific((*mapping), IS_LTOGM_CLASSID, 1);
  /* Reference counted: only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {
    *mapping = NULL;
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Index array is freed only if the mapping owns it (COPY_VALUES / OWN_POINTER creation modes) */
  if ((*mapping)->dealloc_indices) PetscCall(PetscFree((*mapping)->indices));
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the shared allocation; remaining slots were allocated individually */
    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i = 1; i < (*mapping)->info_nproc; i++) PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  PetscCall(PetscFree2((*mapping)->info_nodec, (*mapping)->info_nodei));
  /* Type-specific teardown (basic table / hash) before the header goes away */
  if ((*mapping)->ops->destroy) PetscCall((*(*mapping)->ops->destroy)(*mapping));
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
700 
/*@
  ISLocalToGlobalMappingApplyIS - Creates from an `IS` in the local numbering
  a new index set using the global numbering defined in an `ISLocalToGlobalMapping`
  context.

  Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
- is      - index set in local numbering

  Output Parameter:
. newis - index set in global numbering

  Level: advanced

  Note:
  The output `IS` will have the same communicator as the input `IS`.

.seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping, IS is, IS *newis)
{
  PetscInt        n, *idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscValidHeaderSpecific(is, IS_CLASSID, 2);
  PetscAssertPointer(newis, 3);

  PetscCall(ISGetLocalSize(is, &n));
  PetscCall(ISGetIndices(is, &idxin));
  PetscCall(PetscMalloc1(n, &idxout));
  PetscCall(ISLocalToGlobalMappingApply(mapping, n, idxin, idxout));
  PetscCall(ISRestoreIndices(is, &idxin));
  /* The new IS takes ownership of idxout (PETSC_OWN_POINTER): no copy, no free here */
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is), n, idxout, PETSC_OWN_POINTER, newis));
  PetscFunctionReturn(PETSC_SUCCESS);
}
741 
/*@
  ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
  and converts them to the global numbering.

  Not Collective

  Input Parameters:
+ mapping - the local to global mapping context
. N       - number of integers
- in      - input indices in local numbering

  Output Parameter:
. out - indices in global numbering

  Level: advanced

  Note:
  The `in` and `out` array parameters may be identical.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
          `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
          `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
{
  PetscInt i, bs, Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  bs   = mapping->bs;
  /* Nmax is the number of local entries (blocks times block size); valid inputs are in [0, Nmax) */
  Nmax = bs * mapping->n;
  if (bs == 1) {
    /* Fast path: indices map one-to-one */
    const PetscInt *idx = mapping->indices;
    for (i = 0; i < N; i++) {
      /* Negative indices pass through unchanged (they mean "no value") */
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
      out[i] = idx[in[i]];
    }
  } else {
    /* Blocked: idx stores one global block index per local block; expand to the entry within the block */
    const PetscInt *idx = mapping->indices;
    for (i = 0; i < N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
      out[i] = idx[in[i] / bs] * bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
796 
797 /*@
798   ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
799 
800   Not Collective
801 
802   Input Parameters:
803 + mapping - the local to global mapping context
804 . N       - number of integers
805 - in      - input indices in local block numbering
806 
807   Output Parameter:
808 . out - indices in global block numbering
809 
810   Example:
811   If the index values are {0,1,6,7} set with a call to `ISLocalToGlobalMappingCreate`(`PETSC_COMM_SELF`,2,2,{0,3}) then the mapping applied to 0
812   (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
813 
814   Level: advanced
815 
816   Note:
817   The `in` and `out` array parameters may be identical.
818 
819 .seealso: [](sec_scatter), `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
820           `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
821           `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
822 @*/
823 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping, PetscInt N, const PetscInt in[], PetscInt out[])
824 {
825   PetscInt        i, Nmax;
826   const PetscInt *idx;
827 
828   PetscFunctionBegin;
829   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
830   Nmax = mapping->n;
831   idx  = mapping->indices;
832   for (i = 0; i < N; i++) {
833     if (in[i] < 0) {
834       out[i] = in[i];
835       continue;
836     }
837     PetscCheck(in[i] < Nmax, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT, in[i], Nmax - 1, i);
838     out[i] = idx[in[i]];
839   }
840   PetscFunctionReturn(PETSC_SUCCESS);
841 }
842 
/*@
  ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
  specified with a global numbering.

  Not Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type    - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
           `IS_GTOLM_DROP` - drops the indices with no local value from the output list
. n       - number of global indices to map
- idx     - global indices to map

  Output Parameters:
+ nout   - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call `ISGlobalToLocalMappingApply()` with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call `ISGlobalToLocalMappingApply()`
             a second time to set the values.

  Level: advanced

  Notes:
  Either `nout` or `idxout` may be `NULL`. `idx` and `idxout` may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of
  `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

  Developer Notes:
  The manual page states that `idx` and `idxout` may be identical but the calling
  sequence declares `idx` as const so it cannot be the same as `idxout`.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* first use: lazily build the global-to-local data structure (basic table or hash, per the mapping type) */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* dispatch to the type-specific implementation */
  PetscUseTypeMethod(mapping, globaltolocalmappingapply, type, n, idx, nout, idxout);
  PetscFunctionReturn(PETSC_SUCCESS);
}
889 
890 /*@
891   ISGlobalToLocalMappingApplyIS - Creates from an `IS` in the global numbering
892   a new index set using the local numbering defined in an `ISLocalToGlobalMapping`
893   context.
894 
895   Not Collective
896 
897   Input Parameters:
898 + mapping - mapping between local and global numbering
899 . type    - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
900            `IS_GTOLM_DROP` - drops the indices with no local value from the output list
901 - is      - index set in global numbering
902 
903   Output Parameter:
904 . newis - index set in local numbering
905 
906   Level: advanced
907 
908   Note:
909   The output `IS` will be sequential, as it encodes a purely local operation
910 
911 .seealso: [](sec_scatter), `ISGlobalToLocalMapping`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
912           `ISLocalToGlobalMappingDestroy()`
913 @*/
914 PetscErrorCode ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, IS is, IS *newis)
915 {
916   PetscInt        n, nout, *idxout;
917   const PetscInt *idxin;
918 
919   PetscFunctionBegin;
920   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
921   PetscValidHeaderSpecific(is, IS_CLASSID, 3);
922   PetscAssertPointer(newis, 4);
923 
924   PetscCall(ISGetLocalSize(is, &n));
925   PetscCall(ISGetIndices(is, &idxin));
926   if (type == IS_GTOLM_MASK) {
927     PetscCall(PetscMalloc1(n, &idxout));
928   } else {
929     PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, NULL));
930     PetscCall(PetscMalloc1(nout, &idxout));
931   }
932   PetscCall(ISGlobalToLocalMappingApply(mapping, type, n, idxin, &nout, idxout));
933   PetscCall(ISRestoreIndices(is, &idxin));
934   PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nout, idxout, PETSC_OWN_POINTER, newis));
935   PetscFunctionReturn(PETSC_SUCCESS);
936 }
937 
/*@
  ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
  specified with a block global numbering.

  Not Collective

  Input Parameters:
+ mapping - mapping between local and global numbering
. type    - `IS_GTOLM_MASK` - maps global indices with no local value to -1 in the output list (i.e., mask them)
           `IS_GTOLM_DROP` - drops the indices with no local value from the output list
. n       - number of global indices to map
- idx     - global indices to map

  Output Parameters:
+ nout   - number of indices in output array (if type == `IS_GTOLM_MASK` then nout = n)
- idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call `ISGlobalToLocalMappingApplyBlock()` with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call `ISGlobalToLocalMappingApplyBlock()`
             a second time to set the values.

  Level: advanced

  Notes:
  Either `nout` or `idxout` may be `NULL`. `idx` and `idxout` may be identical.

  For "small" problems when using `ISGlobalToLocalMappingApply()` and `ISGlobalToLocalMappingApplyBlock()`, the `ISLocalToGlobalMappingType` of
  `ISLOCALTOGLOBALMAPPINGBASIC` will be used;
  this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems `ISLOCALTOGLOBALMAPPINGHASH` is used, this is scalable.
  Use `ISLocalToGlobalMappingSetType()` or call `ISLocalToGlobalMappingSetFromOptions()` with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

  Developer Notes:
  The manual page states that `idx` and `idxout` may be identical but the calling
  sequence declares `idx` as const so it cannot be the same as `idxout`.

.seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingDestroy()`
@*/
PetscErrorCode ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping, ISGlobalToLocalMappingMode type, PetscInt n, const PetscInt idx[], PetscInt *nout, PetscInt idxout[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  /* first use: lazily build the global-to-local data structure (basic table or hash, per the mapping type) */
  if (!mapping->data) PetscCall(ISGlobalToLocalMappingSetUp(mapping));
  /* dispatch to the type-specific implementation */
  PetscUseTypeMethod(mapping, globaltolocalmappingapplyblock, type, n, idx, nout, idxout);
  PetscFunctionReturn(PETSC_SUCCESS);
}
984 
985 /*@C
986   ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
987   each index shared by more than one processor
988 
989   Collective
990 
991   Input Parameter:
992 . mapping - the mapping from local to global indexing
993 
994   Output Parameters:
995 + nproc    - number of processors that are connected to this one
996 . procs    - neighboring processors
997 . numprocs - number of indices for each subdomain (processor)
998 - indices  - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
999 
1000   Level: advanced
1001 
1002   Fortran Notes:
1003   There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that
1004   `procs`[], `numprocs`[] and `indices`[][] are large enough arrays, either by allocating them
1005   dynamically or defining static ones large enough.
1006 .vb
1007   PetscInt indices[nproc][numprocmax],ierr)
1008   ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1009   ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1010 .ve
1011 
1012 .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1013           `ISLocalToGlobalMappingRestoreInfo()`
1014 @*/
1015 PetscErrorCode ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
1019   if (mapping->info_cached) {
1020     *nproc    = mapping->info_nproc;
1021     *procs    = mapping->info_procs;
1022     *numprocs = mapping->info_numprocs;
1023     *indices  = mapping->info_indices;
1024   } else {
1025     PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping, nproc, procs, numprocs, indices));
1026   }
1027   PetscFunctionReturn(PETSC_SUCCESS);
1028 }
1029 
/*
  Computes, for each MPI process sharing block indices with this one, the list of shared
  block indices (in local numbering, sorted by global numbering), and caches the result
  on the mapping.

  Rendezvous algorithm: each global index is assigned a temporary owner by splitting the
  global index range into equal chunks of width `scale`; every process reports its local
  indices to the temporary owners (round 1, tag1); the owners then send back, for every
  index reported by more than one process, the list of sharing processes (rounds 2 and 3,
  tag2 for message lengths, tag3 for payloads).  Finally the entry for this rank itself
  is swapped into slot 0 of the output arrays.
*/
static PetscErrorCode ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscMPIInt  size, rank, tag1, tag2, tag3, *len, *source, imdex;
  PetscInt     i, n = mapping->n, Ng, ng, max = 0, *lindices = mapping->indices;
  PetscInt    *nprocs, *owner, nsends, *sends, j, *starts, nmax, nrecvs, *recvs, proc;
  PetscInt     cnt, scale, *ownedsenders, *nownedsenders, rstart;
  PetscInt     node, nownedm, nt, *sends2, nsends2, *starts2, *lens2, *dest, nrecvs2, *starts3, *recvs2, k, *bprocs, *tmp;
  PetscInt     first_procs, first_numprocs, *first_indices;
  MPI_Request *recv_waits, *send_waits;
  MPI_Status   recv_status, *send_status, *recv_statuses;
  MPI_Comm     comm;
  PetscBool    debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (size == 1) {
    /* uniprocessor: no neighbors; cache (and return) empty info */
    *nproc = 0;
    *procs = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0] = NULL;
    /* save info for reuse */
    mapping->info_nproc    = *nproc;
    mapping->info_procs    = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices  = *indices;
    mapping->info_cached   = PETSC_TRUE;
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options, NULL, "-islocaltoglobalmappinggetinfo_debug", &debug, NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  /* three distinct tags so the three communication rounds cannot mix messages */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping, &tag3));

  for (i = 0; i < n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  /* Ng = one more than the largest global index used on any process */
  PetscCall(MPIU_Allreduce(&max, &Ng, 1, MPIU_INT, MPI_MAX, comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  scale = Ng / size + 1; /* width of each rank's temporary-ownership range */
  ng    = scale;
  if (rank == size - 1) ng = Ng - scale * (size - 1); /* last rank takes the (possibly shorter) remainder */
  ng     = PetscMax(1, ng);
  rstart = scale * rank; /* first global index temporarily owned by this rank */

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2 * size, &nprocs));
  PetscCall(PetscArrayzero(nprocs, 2 * size));

  /* determine owners of each local node  */
  PetscCall(PetscMalloc1(n, &owner));
  for (i = 0; i < n; i++) {
    proc                 = lindices[i] / scale; /* processor that globally owns this index */
    nprocs[2 * proc + 1] = 1;                   /* processor globally owns at least one of ours */
    owner[i]             = proc;
    nprocs[2 * proc]++; /* count of how many that processor globally owns of ours */
  }
  nsends = 0;
  for (i = 0; i < size; i++) nsends += nprocs[2 * i + 1];
  PetscCall(PetscInfo(mapping, "Number of global owners for my local data %" PetscInt_FMT "\n", nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm, nprocs, &nmax, &nrecvs));
  PetscCall(PetscInfo(mapping, "Number of local owners for my global data %" PetscInt_FMT "\n", nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2 * nrecvs + 1) * (nmax + 1), &recvs));
  PetscCall(PetscMalloc1(nrecvs + 1, &recv_waits));
  for (i = 0; i < nrecvs; i++) PetscCallMPI(MPI_Irecv(recvs + 2 * nmax * i, 2 * nmax, MPIU_INT, MPI_ANY_SOURCE, tag1, comm, recv_waits + i));

  /* pack messages containing lists of local nodes to owners */
  /* each entry is a (global index, local index) pair, hence the factor 2 */
  PetscCall(PetscMalloc1(2 * n + 1, &sends));
  PetscCall(PetscMalloc1(size + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];
  for (i = 0; i < n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* the packing loop advanced starts[]; rebuild it before sending */
  starts[0] = 0;
  for (i = 1; i < size; i++) starts[i] = starts[i - 1] + 2 * nprocs[2 * i - 2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends + 1, &send_waits));
  PetscCall(PetscMalloc1(nsends + 1, &dest));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[2 * i]) {
      PetscCallMPI(MPI_Isend(sends + starts[i], 2 * nprocs[2 * i], MPIU_INT, i, tag1, comm, send_waits + cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs + 1, &source));
  PetscCall(PetscMalloc1(nrecvs + 1, &len));
  cnt = nrecvs;
  PetscCall(PetscCalloc1(ng + 1, &nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs, recv_waits, &imdex, &recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status, MPIU_INT, &len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex] / 2;
    /* count how many local owners for each of my global owned indices */
    for (i = 0; i < len[imdex]; i++) nownedsenders[recvs[2 * imdex * nmax + 2 * i] - rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i = 0; i < ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm + 1, &ownedsenders));
  PetscCall(PetscMalloc1(ng + 1, &starts));
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i = 0; i < nrecvs; i++) {
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i = 1; i < ng; i++) {
      if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
      else starts[i] = starts[i - 1];
    }
    for (i = 0; i < ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] global node %" PetscInt_FMT " local owner processors: ", rank, i + rstart));
        for (j = 0; j < nownedsenders[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", ownedsenders[starts[i] + j]));
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends, &send_status));
    PetscCallMPI(MPI_Waitall(nsends, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i = 1; i < ng; i++) {
    if (nownedsenders[i - 1] > 1) starts[i] = starts[i - 1] + nownedsenders[i - 1];
    else starts[i] = starts[i - 1];
  }
  nsends2 = nrecvs; /* reply to every rank that reported indices to us */
  PetscCall(PetscMalloc1(nsends2 + 1, &nprocs)); /* length of each message */
  for (i = 0; i < nrecvs; i++) {
    nprocs[i] = 1;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i = 0; i < nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt + 1, &sends2));
  PetscCall(PetscMalloc1(nsends2 + 1, &starts2));

  starts2[0] = 0;
  for (i = 1; i < nsends2; i++) starts2[i] = starts2[i - 1] + nprocs[i - 1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i = 0; i < nsends2; i++) {
    cnt                = 1;
    sends2[starts2[i]] = 0;
    for (j = 0; j < len[i]; j++) {
      node = recvs[2 * i * nmax + 2 * j] - rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i] + cnt++] = recvs[2 * i * nmax + 2 * j + 1];
        sends2[starts2[i] + cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i] + cnt], &ownedsenders[starts[node]], nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends; /* a reply comes back from each temporary owner we contacted in round 1 */
  PetscCall(PetscMalloc1(nrecvs2 + 1, &lens2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &starts3));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(&lens2[i], 1, MPIU_INT, dest[i], tag2, comm, recv_waits + i));

  /* send the message lengths */
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Send(&nprocs[i], 1, MPIU_INT, source[i], tag2, comm));

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i = 0; i < nrecvs2 - 1; i++) {
    starts3[i + 1] = starts3[i] + lens2[i];
    nt += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2 - 1];

  PetscCall(PetscMalloc1(nt + 1, &recvs2));
  PetscCall(PetscMalloc1(nrecvs2 + 1, &recv_waits));
  for (i = 0; i < nrecvs2; i++) PetscCallMPI(MPI_Irecv(recvs2 + starts3[i], lens2[i], MPIU_INT, dest[i], tag3, comm, recv_waits + i));

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2 + 1, &send_waits));
  for (i = 0; i < nsends2; i++) PetscCallMPI(MPI_Isend(sends2 + starts2[i], nprocs[i], MPIU_INT, source[i], tag3, comm, send_waits + i));

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2, &recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2, recv_waits, recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i = 0; i < nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j = 0; j < nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm, "[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ", rank, recvs2[cnt], recvs2[cnt + 1]));
        for (k = 0; k < recvs2[cnt + 1]; k++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", recvs2[cnt + 2 + k]));
        cnt += 2 + recvs2[cnt + 1];
        PetscCall(PetscSynchronizedPrintf(comm, "\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here: nprocs[r] = number of shared indices for neighbor rank r */
  PetscCall(PetscCalloc1(size, &nprocs));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) nprocs[recvs2[cnt + 2 + k]]++;
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  nt = 0;
  for (i = 0; i < size; i++) nt += (nprocs[i] > 0);
  *nproc = nt;
  PetscCall(PetscMalloc1(nt + 1, procs));
  PetscCall(PetscMalloc1(nt + 1, numprocs));
  PetscCall(PetscMalloc1(nt + 1, indices));
  for (i = 0; i < nt + 1; i++) (*indices)[i] = NULL;
  /* bprocs[r] = position of neighbor rank r in the compressed output arrays */
  PetscCall(PetscMalloc1(size, &bprocs));
  cnt = 0;
  for (i = 0; i < size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i], &(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs, nt));
  cnt = 0;
  for (i = 0; i < nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j = 0; j < nt; j++) {
      for (k = 0; k < recvs2[cnt + 1]; k++) (*indices)[bprocs[recvs2[cnt + 2 + k]]][(*numprocs)[bprocs[recvs2[cnt + 2 + k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt + 1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i = 0; i < nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i], &tmp));
    for (j = 0; j < (*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i], tmp, (*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i = 0; i < nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm, "[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ", rank, (*procs)[i], (*numprocs)[i]));
      for (j = 0; j < (*numprocs)[i]; j++) PetscCall(PetscSynchronizedPrintf(comm, "%" PetscInt_FMT " ", (*indices)[i][j]));
      PetscCall(PetscSynchronizedPrintf(comm, "\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm, PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2, &send_status));
    PetscCallMPI(MPI_Waitall(nsends2, send_waits, send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i = 0; i < *nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc    = *nproc;
  mapping->info_procs    = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices  = *indices;
  mapping->info_cached   = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1423 
1424 /*@C
1425   ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetBlockInfo()`
1426 
1427   Collective
1428 
1429   Input Parameter:
1430 . mapping - the mapping from local to global indexing
1431 
1432   Output Parameters:
1433 + nproc    - number of processors that are connected to this one
1434 . procs    - neighboring processors
1435 . numprocs - number of indices for each processor
1436 - indices  - indices of local nodes shared with neighbor (sorted by global numbering)
1437 
1438   Level: advanced
1439 
1440 .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1441           `ISLocalToGlobalMappingGetInfo()`
1442 @*/
1443 PetscErrorCode ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1444 {
1445   PetscFunctionBegin;
1446   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
1447   if (mapping->info_free) {
1448     PetscCall(PetscFree(*numprocs));
1449     if (*indices) {
1450       PetscInt i;
1451 
1452       PetscCall(PetscFree((*indices)[0]));
1453       for (i = 1; i < *nproc; i++) PetscCall(PetscFree((*indices)[i]));
1454       PetscCall(PetscFree(*indices));
1455     }
1456   }
1457   *nproc    = 0;
1458   *procs    = NULL;
1459   *numprocs = NULL;
1460   *indices  = NULL;
1461   PetscFunctionReturn(PETSC_SUCCESS);
1462 }
1463 
1464 /*@C
1465   ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1466   each index shared by more than one processor
1467 
1468   Collective
1469 
1470   Input Parameter:
1471 . mapping - the mapping from local to global indexing
1472 
1473   Output Parameters:
1474 + nproc    - number of processors that are connected to this one
1475 . procs    - neighboring processors
1476 . numprocs - number of indices for each subdomain (processor)
1477 - indices  - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1478 
1479   Level: advanced
1480 
1481   Note:
1482   The user needs to call `ISLocalToGlobalMappingRestoreInfo()` when the data is no longer needed.
1483 
1484   Fortran Notes:
1485   There is no `ISLocalToGlobalMappingRestoreInfo()` in Fortran. You must make sure that
1486   `procs`[], `numprocs`[] and `indices`[][] are large enough arrays, either by allocating them
1487   dynamically or defining static ones large enough.
1488 
1489 .vb
1490   PetscInt indices[nproc][numprocmax],ierr)
1491   ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1492   ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1493 .ve
1494 
1495 .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1496           `ISLocalToGlobalMappingRestoreInfo()`
1497 @*/
1498 PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
1499 {
1500   PetscInt **bindices = NULL, *bnumprocs = NULL, bs, i, j, k;
1501 
1502   PetscFunctionBegin;
1503   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
1504   bs = mapping->bs;
1505   PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping, nproc, procs, &bnumprocs, &bindices));
1506   if (bs > 1) { /* we need to expand the cached info */
1507     PetscCall(PetscCalloc1(*nproc, &*indices));
1508     PetscCall(PetscCalloc1(*nproc, &*numprocs));
1509     for (i = 0; i < *nproc; i++) {
1510       PetscCall(PetscMalloc1(bs * bnumprocs[i], &(*indices)[i]));
1511       for (j = 0; j < bnumprocs[i]; j++) {
1512         for (k = 0; k < bs; k++) (*indices)[i][j * bs + k] = bs * bindices[i][j] + k;
1513       }
1514       (*numprocs)[i] = bnumprocs[i] * bs;
1515     }
1516     mapping->info_free = PETSC_TRUE;
1517   } else {
1518     *numprocs = bnumprocs;
1519     *indices  = bindices;
1520   }
1521   PetscFunctionReturn(PETSC_SUCCESS);
1522 }
1523 
/*@C
  ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetInfo()`

  Collective

  Input Parameter:
. mapping - the mapping from local to global indexing

  Output Parameters:
+ nproc    - number of processors that are connected to this one
. procs    - neighboring processors
. numprocs - number of indices for each processor
- indices  - indices of local nodes shared with neighbor (sorted by global numbering)

  Level: advanced

.seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`
@*/
PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping, PetscInt *nproc, PetscInt *procs[], PetscInt *numprocs[], PetscInt **indices[])
{
  PetscFunctionBegin;
  /* the point-level arrays are either the cached block arrays (bs == 1) or expanded
     copies tracked by mapping->info_free, so the block restore handles both cases */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping, nproc, procs, numprocs, indices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1549 
1550 /*@C
1551   ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each MPI rank
1552 
1553   Collective
1554 
1555   Input Parameter:
1556 . mapping - the mapping from local to global indexing
1557 
1558   Output Parameters:
+ nnodes  - number of local nodes (same as the value returned by `ISLocalToGlobalMappingGetSize()`)
1560 . count   - number of neighboring processors per node
1561 - indices - indices of processes sharing the node (sorted)
1562 
1563   Level: advanced
1564 
1565   Note:
  The user needs to call `ISLocalToGlobalMappingRestoreNodeInfo()` when the data is no longer needed.
1567 
1568 .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1569           `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
1570 @*/
PetscErrorCode ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
{
  PetscInt n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping, &n));
  if (!mapping->info_nodec) { /* build and cache the per-node neighbor lists on first use */
    PetscInt i, m, n_neigh, *neigh, *n_shared, **shared;

    /* info_nodec has n+1 entries; info_nodei holds n pointers into one contiguous allocation (CSR-like layout) */
    PetscCall(PetscMalloc2(n + 1, &mapping->info_nodec, n, &mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
    /* pass 1: count entries per node; every node starts with one slot for neigh[0]
       (presumably this process itself -- TODO confirm against the GetInfo contract) */
    for (i = 0; i < n; i++) mapping->info_nodec[i] = 1;
    m                      = n;
    mapping->info_nodec[n] = 0;
    /* neighbor 0 is deliberately skipped: its single slot per node was counted above */
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      m += n_shared[i];
      for (j = 0; j < n_shared[i]; j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one allocation of total size m, sub-arrays located by prefix sums of the counts */
    if (n) PetscCall(PetscMalloc1(m, &mapping->info_nodei[0]));
    for (i = 1; i < n; i++) mapping->info_nodei[i] = mapping->info_nodei[i - 1] + mapping->info_nodec[i - 1];
    /* pass 2: reset the counts (reused as fill cursors) and insert the ranks */
    PetscCall(PetscArrayzero(mapping->info_nodec, n));
    for (i = 0; i < n; i++) {
      mapping->info_nodec[i]    = 1;
      mapping->info_nodei[i][0] = neigh[0];
    }
    for (i = 1; i < n_neigh; i++) {
      PetscInt j;

      for (j = 0; j < n_shared[i]; j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and remove duplicate ranks, shrinking info_nodec[i] accordingly */
    for (i = 0; i < n; i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i], mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping, &n_neigh, &neigh, &n_shared, &shared));
  }
  /* hand out the cached arrays; ownership stays with the mapping */
  if (nnodes) *nnodes = n;
  if (count) *count = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1617 
1618 /*@C
1619   ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by `ISLocalToGlobalMappingGetNodeInfo()`
1620 
1621   Collective
1622 
1623   Input Parameter:
1624 . mapping - the mapping from local to global indexing
1625 
1626   Output Parameters:
1627 + nnodes  - number of local nodes
1628 . count   - number of neighboring processors per node
1629 - indices - indices of processes sharing the node (sorted)
1630 
1631   Level: advanced
1632 
1633 .seealso: [](sec_scatter), `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
          `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingGetNodeInfo()`
1635 @*/
1636 PetscErrorCode ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping, PetscInt *nnodes, PetscInt *count[], PetscInt **indices[])
1637 {
1638   PetscFunctionBegin;
1639   PetscValidHeaderSpecific(mapping, IS_LTOGM_CLASSID, 1);
1640   if (nnodes) *nnodes = 0;
1641   if (count) *count = NULL;
1642   if (indices) *indices = NULL;
1643   PetscFunctionReturn(PETSC_SUCCESS);
1644 }
1645 
1646 /*MC
1647     ISLocalToGlobalMappingGetIndicesF90 - Get global indices for every local point that is mapped
1648 
1649     Synopsis:
    ISLocalToGlobalMappingGetIndicesF90(ISLocalToGlobalMapping ltog, PetscInt, pointer :: array(:), integer ierr)
1651 
1652     Not Collective
1653 
    Input Parameter:
.   ltog - the local to global mapping
1656 
1657     Output Parameter:
1658 .   array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1659 
1660     Level: advanced
1661 
    Note:
    Use `ISLocalToGlobalMappingRestoreIndicesF90()` when you no longer need access to the data
1664 
1665 .seealso: [](sec_fortranarrays), [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingGetIndices()`, `ISLocalToGlobalMappingRestoreIndices()`,
1666           `ISLocalToGlobalMappingRestoreIndicesF90()`
1667 M*/
1668 
1669 /*MC
1670     ISLocalToGlobalMappingRestoreIndicesF90 - restores the global indices for every local point that is mapped obtained with `ISLocalToGlobalMappingGetIndicesF90()`
1671 
1672     Synopsis:
    ISLocalToGlobalMappingRestoreIndicesF90(ISLocalToGlobalMapping ltog, PetscInt, pointer :: array(:), integer ierr)
1674 
1675     Not Collective
1676 
1677     Input Parameters:
+   ltog - the local to global mapping
1679 -   array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1680 
1681     Level: advanced
1682 
1683 .seealso: [](sec_fortranarrays), [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingGetIndices()`, `ISLocalToGlobalMappingRestoreIndices()`,
1684           `ISLocalToGlobalMappingGetIndicesF90()`
1685 M*/
1686 
1687 /*@C
1688   ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1689 
1690   Not Collective
1691 
1692   Input Parameter:
1693 . ltog - local to global mapping
1694 
1695   Output Parameter:
1696 . array - array of indices, the length of this array may be obtained with `ISLocalToGlobalMappingGetSize()`
1697 
1698   Level: advanced
1699 
1700   Note:
  `ISLocalToGlobalMappingGetSize()` returns the length of this array
1702 
1703 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`,
1704           `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1705 @*/
1706 PetscErrorCode ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1707 {
1708   PetscFunctionBegin;
1709   PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
1710   PetscAssertPointer(array, 2);
1711   if (ltog->bs == 1) {
1712     *array = ltog->indices;
1713   } else {
1714     PetscInt       *jj, k, i, j, n = ltog->n, bs = ltog->bs;
1715     const PetscInt *ii;
1716 
1717     PetscCall(PetscMalloc1(bs * n, &jj));
1718     *array = jj;
1719     k      = 0;
1720     ii     = ltog->indices;
1721     for (i = 0; i < n; i++)
1722       for (j = 0; j < bs; j++) jj[k++] = bs * ii[i] + j;
1723   }
1724   PetscFunctionReturn(PETSC_SUCCESS);
1725 }
1726 
1727 /*@C
1728   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with `ISLocalToGlobalMappingGetIndices()`
1729 
1730   Not Collective
1731 
1732   Input Parameters:
1733 + ltog  - local to global mapping
1734 - array - array of indices
1735 
1736   Level: advanced
1737 
1738 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1739 @*/
1740 PetscErrorCode ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
1741 {
1742   PetscFunctionBegin;
1743   PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
1744   PetscAssertPointer(array, 2);
1745   PetscCheck(ltog->bs != 1 || *array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");
1746 
1747   if (ltog->bs > 1) PetscCall(PetscFree(*(void **)array));
1748   PetscFunctionReturn(PETSC_SUCCESS);
1749 }
1750 
1751 /*@C
1752   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1753 
1754   Not Collective
1755 
1756   Input Parameter:
1757 . ltog - local to global mapping
1758 
1759   Output Parameter:
1760 . array - array of indices
1761 
1762   Level: advanced
1763 
1764 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1765 @*/
PetscErrorCode ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscAssertPointer(array, 2);
  /* borrowed reference to the mapping's internal block-index array; the caller must not free it
     and must return it with ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1774 
1775 /*@C
1776   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with `ISLocalToGlobalMappingGetBlockIndices()`
1777 
1778   Not Collective
1779 
1780   Input Parameters:
1781 + ltog  - local to global mapping
1782 - array - array of indices
1783 
1784   Level: advanced
1785 
1786 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1787 @*/
PetscErrorCode ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog, const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscAssertPointer(array, 2);
  /* the Get routine hands out the internal array itself, so anything else is a mismatched Get/Restore pair */
  PetscCheck(*array == ltog->indices, PETSC_COMM_SELF, PETSC_ERR_ARG_BADPTR, "Trying to return mismatched pointer");
  /* clear the caller's view; the array remains owned by the mapping and is not freed here */
  *array = NULL;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1797 
1798 /*@C
1799   ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1800 
1801   Not Collective
1802 
1803   Input Parameters:
1804 + comm  - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1805 . n     - number of mappings to concatenate
1806 - ltogs - local to global mappings
1807 
1808   Output Parameter:
1809 . ltogcat - new mapping
1810 
1811   Level: advanced
1812 
1813   Note:
1814   This currently always returns a mapping with block size of 1
1815 
1816   Developer Notes:
1817   If all the input mapping have the same block size we could easily handle that as a special case
1818 
1819 .seealso: [](sec_scatter), `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingCreate()`
1820 @*/
1821 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm, PetscInt n, const ISLocalToGlobalMapping ltogs[], ISLocalToGlobalMapping *ltogcat)
1822 {
1823   PetscInt i, cnt, m, *idx;
1824 
1825   PetscFunctionBegin;
1826   PetscCheck(n >= 0, comm, PETSC_ERR_ARG_OUTOFRANGE, "Must have a non-negative number of mappings, given %" PetscInt_FMT, n);
1827   if (n > 0) PetscAssertPointer(ltogs, 3);
1828   for (i = 0; i < n; i++) PetscValidHeaderSpecific(ltogs[i], IS_LTOGM_CLASSID, 3);
1829   PetscAssertPointer(ltogcat, 4);
1830   for (cnt = 0, i = 0; i < n; i++) {
1831     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m));
1832     cnt += m;
1833   }
1834   PetscCall(PetscMalloc1(cnt, &idx));
1835   for (cnt = 0, i = 0; i < n; i++) {
1836     const PetscInt *subidx;
1837     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i], &m));
1838     PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i], &subidx));
1839     PetscCall(PetscArraycpy(&idx[cnt], subidx, m));
1840     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i], &subidx));
1841     cnt += m;
1842   }
1843   PetscCall(ISLocalToGlobalMappingCreate(comm, 1, cnt, idx, PETSC_OWN_POINTER, ltogcat));
1844   PetscFunctionReturn(PETSC_SUCCESS);
1845 }
1846 
1847 /*MC
1848       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
1849                                     used this is good for only small and moderate size problems.
1850 
1851    Options Database Key:
1852 .   -islocaltoglobalmapping_type basic - select this method
1853 
1854    Level: beginner
1855 
1856    Developer Note:
1857    This stores all the mapping information on each MPI rank.
1858 
1859 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1860 M*/
1861 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1862 {
1863   PetscFunctionBegin;
1864   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1865   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1866   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1867   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1868   PetscFunctionReturn(PETSC_SUCCESS);
1869 }
1870 
1871 /*MC
1872       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the `ISLocalToGlobalMapping` object. When `ISGlobalToLocalMappingApply()` is
1873                                     used this is good for large memory problems.
1874 
1875    Options Database Key:
1876 .   -islocaltoglobalmapping_type hash - select this method
1877 
1878    Level: beginner
1879 
1880    Note:
1881     This is selected automatically for large problems if the user does not set the type.
1882 
1883 .seealso: [](sec_scatter), `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
1884 M*/
1885 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1886 {
1887   PetscFunctionBegin;
1888   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1889   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1890   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1891   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1892   PetscFunctionReturn(PETSC_SUCCESS);
1893 }
1894 
1895 /*@C
1896   ISLocalToGlobalMappingRegister -  Registers a method for applying a global to local mapping with an `ISLocalToGlobalMapping`
1897 
1898   Not Collective
1899 
1900   Input Parameters:
1901 + sname    - name of a new method
1902 - function - routine to create method context
1903 
1904   Example Usage:
1905 .vb
1906    ISLocalToGlobalMappingRegister("my_mapper", MyCreate);
1907 .ve
1908 
1909   Then, your mapping can be chosen with the procedural interface via
1910 $     ISLocalToGlobalMappingSetType(ltog, "my_mapper")
1911   or at runtime via the option
1912 $     -islocaltoglobalmapping_type my_mapper
1913 
1914   Level: advanced
1915 
1916   Note:
1917   `ISLocalToGlobalMappingRegister()` may be called multiple times to add several user-defined mappings.
1918 
1919 .seealso: [](sec_scatter), `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`,
1920           `ISLOCALTOGLOBALMAPPINGHASH`, `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingApply()`
1921 @*/
PetscErrorCode ISLocalToGlobalMappingRegister(const char sname[], PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscFunctionBegin;
  /* ensure the IS package (and thus the registry infrastructure) is initialized before adding to the list */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList, sname, function));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1929 
1930 /*@C
1931   ISLocalToGlobalMappingSetType - Sets the implementation type `ISLocalToGlobalMapping` will use
1932 
1933   Logically Collective
1934 
1935   Input Parameters:
1936 + ltog - the `ISLocalToGlobalMapping` object
1937 - type - a known method
1938 
1939   Options Database Key:
1940 . -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list of available methods (for instance, basic or hash)
1941 
1942   Level: intermediate
1943 
1944   Notes:
1945   See `ISLocalToGlobalMappingType` for available methods
1946 
1947   Normally, it is best to use the `ISLocalToGlobalMappingSetFromOptions()` command and
1948   then set the `ISLocalToGlobalMappingType` from the options database rather than by using
1949   this routine.
1950 
1951   Developer Notes:
1952   `ISLocalToGlobalMappingRegister()` is used to add new types to `ISLocalToGlobalMappingList` from which they
1953   are accessed by `ISLocalToGlobalMappingSetType()`.
1954 
1955 .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
1956 @*/
1957 PetscErrorCode ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1958 {
1959   PetscBool match;
1960   PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;
1961 
1962   PetscFunctionBegin;
1963   PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
1964   if (type) PetscAssertPointer(type, 2);
1965 
1966   PetscCall(PetscObjectTypeCompare((PetscObject)ltog, type, &match));
1967   if (match) PetscFunctionReturn(PETSC_SUCCESS);
1968 
1969   /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1970   if (type) {
1971     PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList, type, &r));
1972     PetscCheck(r, PetscObjectComm((PetscObject)ltog), PETSC_ERR_ARG_UNKNOWN_TYPE, "Unable to find requested ISLocalToGlobalMapping type %s", type);
1973   }
1974   /* Destroy the previous private LTOG context */
1975   PetscTryTypeMethod(ltog, destroy);
1976   ltog->ops->destroy = NULL;
1977 
1978   PetscCall(PetscObjectChangeTypeName((PetscObject)ltog, type));
1979   if (r) PetscCall((*r)(ltog));
1980   PetscFunctionReturn(PETSC_SUCCESS);
1981 }
1982 
1983 /*@C
1984   ISLocalToGlobalMappingGetType - Get the type of the `ISLocalToGlobalMapping`
1985 
1986   Not Collective
1987 
1988   Input Parameter:
1989 . ltog - the `ISLocalToGlobalMapping` object
1990 
1991   Output Parameter:
1992 . type - the type
1993 
1994   Level: intermediate
1995 
1996 .seealso: [](sec_scatter), `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
1997 @*/
PetscErrorCode ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog, IS_LTOGM_CLASSID, 1);
  PetscAssertPointer(type, 2);
  /* returns the cached type name (NULL if no type has been set yet); the string is owned by the object */
  *type = ((PetscObject)ltog)->type_name;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2006 
/* set to PETSC_TRUE once ISLocalToGlobalMappingRegisterAll() has populated the type registry */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
2008 
2009 /*@C
2010   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the `IS` package.
2011 
2012   Not Collective
2013 
2014   Level: advanced
2015 
2016 .seealso: [](sec_scatter), `ISRegister()`, `ISLocalToGlobalRegister()`
2017 @*/
2018 PetscErrorCode ISLocalToGlobalMappingRegisterAll(void)
2019 {
2020   PetscFunctionBegin;
2021   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(PETSC_SUCCESS);
2022   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
2023   PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
2024   PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
2025   PetscFunctionReturn(PETSC_SUCCESS);
2026 }
2027