xref: /petsc/src/vec/is/utils/isltog.c (revision 96b592737fa480036d2a4095db7e1939ed49daf5)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Data for the ISLOCALTOGLOBALMAPPINGBASIC implementation: a dense lookup table
   covering the range [globalstart, globalend] of global indices */
typedef struct {
  PetscInt *globals;   /* globals[g - globalstart] = local index for global index g, or -1 if absent */
} ISLocalToGlobalMapping_Basic;

/* Data for the ISLOCALTOGLOBALMAPPINGHASH implementation: a sparse hash table,
   scalable when the global index range is much larger than the local size */
typedef struct {
  PetscHMapI globalht; /* hash map: global index -> local index */
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
37 $ ISRestorePointRange(is, &pstart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47 
48   PetscFunctionBeginHot;
49   *pStart = 0;
50   *points = NULL;
51   PetscCall(ISGetLocalSize(pointIS, &numCells));
52   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
53   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
54   *pEnd   = *pStart + numCells;
55   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
56   PetscFunctionReturn(0);
57 }
58 
59 /*@C
60   ISRestorePointRange - Destroys the traversal description
61 
62   Not collective
63 
64   Input Parameters:
65 + pointIS - The IS object
66 . pStart  - The first index, from ISGetPointRange()
67 . pEnd    - One past the last index, from ISGetPointRange()
68 - points  - The indices, from ISGetPointRange()
69 
70   Notes:
71   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
72 $ ISGetPointRange(is, &pStart, &pEnd, &points);
73 $ for (p = pStart; p < pEnd; ++p) {
74 $   const PetscInt point = points ? points[p] : p;
75 $ }
76 $ ISRestorePointRange(is, &pstart, &pEnd, &points);
77 
78   Level: intermediate
79 
80 .seealso: `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
81 @*/
82 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
83 {
84   PetscInt       step = 1;
85   PetscBool      isStride;
86 
87   PetscFunctionBeginHot;
88   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
89   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
90   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
91   PetscFunctionReturn(0);
92 }
93 
94 /*@C
95   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
96 
97   Not collective
98 
99   Input Parameters:
100 + subpointIS - The IS object to be configured
101 . pStar   t  - The first index of the subrange
102 . pEnd       - One past the last index for the subrange
103 - points     - The indices for the entire range, from ISGetPointRange()
104 
105   Output Parameters:
106 . subpointIS - The IS object now configured to be a subrange
107 
108   Notes:
109   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
110 
111   Level: intermediate
112 
113 .seealso: `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
114 @*/
115 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
116 {
117   PetscFunctionBeginHot;
118   if (points) {
119     PetscCall(ISSetType(subpointIS, ISGENERAL));
120     PetscCall(ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER));
121   } else {
122     PetscCall(ISSetType(subpointIS, ISSTRIDE));
123     PetscCall(ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1));
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /* -----------------------------------------------------------------------------------------*/
129 
/*
    Creates the global mapping information in the ISLocalToGlobalMapping structure

    If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0); /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* compute the range [start, end] spanned by the nonnegative global indices */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue; /* negative entries mean "no global index"; skip them */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;} /* no valid indices at all: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;
  if (!((PetscObject)mapping)->type_name) {
    /* heuristic: a dense table over [start, end] wastes memory when the span
       greatly exceeds the local size, so fall back to the hash implementation */
    if ((end - start) > PetscMax(4*n,1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  if (mapping->ops->globaltolocalmappingsetup) PetscCall((*mapping->ops->globaltolocalmappingsetup)(mapping));
  PetscFunctionReturn(0);
}
162 
163 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
164 {
165   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
166   ISLocalToGlobalMapping_Basic *map;
167 
168   PetscFunctionBegin;
169   start            = mapping->globalstart;
170   end              = mapping->globalend;
171   PetscCall(PetscNew(&map));
172   PetscCall(PetscMalloc1(end-start+2,&globals));
173   map->globals     = globals;
174   for (i=0; i<end-start+1; i++) globals[i] = -1;
175   for (i=0; i<n; i++) {
176     if (idx[i] < 0) continue;
177     globals[idx[i] - start] = i;
178   }
179   mapping->data = (void*)map;
180   PetscCall(PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt)));
181   PetscFunctionReturn(0);
182 }
183 
184 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
185 {
186   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
187   ISLocalToGlobalMapping_Hash *map;
188 
189   PetscFunctionBegin;
190   PetscCall(PetscNew(&map));
191   PetscCall(PetscHMapICreate(&map->globalht));
192   for (i=0; i<n; i++) {
193     if (idx[i] < 0) continue;
194     PetscCall(PetscHMapISet(map->globalht,idx[i],i));
195   }
196   mapping->data = (void*)map;
197   PetscCall(PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt)));
198   PetscFunctionReturn(0);
199 }
200 
201 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
202 {
203   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
204 
205   PetscFunctionBegin;
206   if (!map) PetscFunctionReturn(0);
207   PetscCall(PetscFree(map->globals));
208   PetscCall(PetscFree(mapping->data));
209   PetscFunctionReturn(0);
210 }
211 
212 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
213 {
214   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
215 
216   PetscFunctionBegin;
217   if (!map) PetscFunctionReturn(0);
218   PetscCall(PetscHMapIDestroy(&map->globalht));
219   PetscCall(PetscFree(mapping->data));
220   PetscFunctionReturn(0);
221 }
222 
/* Instantiate the ISGlobalToLocalMappingApply() family from the template in isltog.h.
   For each instantiation:
     GTOLTYPE - suffix of the data struct cast from mapping->data (_Basic or _Hash)
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size used by the generated function (mapping->bs or 1)
     GTOL(g, local) - per-index global->local lookup expression
   The template #undef's these macros after each inclusion, so the groups below
   must each redefine all four before re-including the header. */

/* point-wise lookup via the dense table */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* block-wise lookup via the dense table */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* point-wise lookup via the hash table; a missing key leaves local untouched */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* block-wise lookup via the hash table */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
257 
/*@
    ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   nltog - the duplicated local to global mapping

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  ISLocalToGlobalMappingType l2gtype;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* deep-copy the block size and indices, then match the implementation type (basic/hash) */
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog));
  PetscCall(ISLocalToGlobalMappingGetType(ltog,&l2gtype));
  PetscCall(ISLocalToGlobalMappingSetType(*nltog,l2gtype));
  PetscFunctionReturn(0);
}
284 
/*@
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks, so the point-wise size is bs * n */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
308 
/*@C
   ISLocalToGlobalMappingViewFromOptions - View from Options

   Collective on ISLocalToGlobalMapping

   Input Parameters:
+  A - the local to global mapping object
.  obj - Optional object
-  name - command line option

   Level: intermediate

.seealso: `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
  /* delegate to the generic options-driven viewer machinery */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
329 
330 /*@C
331     ISLocalToGlobalMappingView - View a local to global mapping
332 
333     Not Collective
334 
335     Input Parameters:
336 +   ltog - local to global mapping
337 -   viewer - viewer
338 
339     Level: advanced
340 
341 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
342 @*/
343 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
344 {
345   PetscInt       i;
346   PetscMPIInt    rank;
347   PetscBool      iascii;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
351   if (!viewer) {
352     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer));
353   }
354   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
355 
356   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank));
357   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
358   if (iascii) {
359     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer));
360     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
361     for (i=0; i<mapping->n; i++) {
362       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]));
363     }
364     PetscCall(PetscViewerFlush(viewer));
365     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
366   }
367   PetscFunctionReturn(0);
368 }
369 
370 /*@
371     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
372     ordering and a global parallel ordering.
373 
374     Not collective
375 
376     Input Parameter:
377 .   is - index set containing the global numbers for each local number
378 
379     Output Parameter:
380 .   mapping - new mapping data structure
381 
382     Notes:
383     the block size of the IS determines the block size of the mapping
384     Level: advanced
385 
386 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
387 @*/
388 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
389 {
390   PetscInt       n,bs;
391   const PetscInt *indices;
392   MPI_Comm       comm;
393   PetscBool      isblock;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(is,IS_CLASSID,1);
397   PetscValidPointer(mapping,2);
398 
399   PetscCall(PetscObjectGetComm((PetscObject)is,&comm));
400   PetscCall(ISGetLocalSize(is,&n));
401   PetscCall(PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock));
402   if (!isblock) {
403     PetscCall(ISGetIndices(is,&indices));
404     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping));
405     PetscCall(ISRestoreIndices(is,&indices));
406   } else {
407     PetscCall(ISGetBlockSize(is,&bs));
408     PetscCall(ISBlockGetIndices(is,&indices));
409     PetscCall(ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping));
410     PetscCall(ISBlockRestoreIndices(is,&indices));
411   }
412   PetscFunctionReturn(0);
413 }
414 
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Notes:
    If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);
  PetscCall(PetscObjectGetComm((PetscObject)sf,&comm));
  PetscCall(PetscSFGetGraph(sf,&nroots,&nleaves,NULL,NULL));
  if (start == PETSC_DECIDE) {
    /* contiguous global numbering: this rank starts after the roots of all lower ranks
       (MPI_Exscan leaves start at 0 on rank 0) */
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm));
  } else PetscCheck(start >= 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
  ++maxlocal; /* leaf range is inclusive: convert the maximum leaf index to a count */
  PetscCall(PetscMalloc1(nroots,&globals));
  PetscCall(PetscMalloc1(maxlocal,&ltog));
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1; /* slots not touched by any leaf keep the "no global" marker */
  /* pull each root's global number down to the leaves that reference it */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
  /* the mapping takes ownership of ltog; globals was only scratch space */
  PetscCall(ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(0);
}
461 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Notes:
    The stored indices are rebuilt in terms of the new block size; each new block of bs
    point indices must either be consecutive or consist entirely of negative entries.

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheck(bs >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0); /* nothing to do */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs; /* number of blocks under the new block size */
  PetscCheck((on*obs)%bs == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  PetscCall(PetscMalloc1(nn,&nid));
  /* get the expanded (point-wise) indices so blocks can be re-grouped */
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping,&oid));
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* verify the new block is made of consecutive point indices, counting negatives */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheck(oid[i*bs+j] == oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++; /* account for the last entry of the block (j == bs-1 here) */
    if (cn) {
      /* a block must be entirely negative (unmapped) or entirely nonnegative */
      PetscCheck(cn == bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs; /* the block index is the first point index divided by bs */
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping,&oid));

  mapping->n           = nn;
  mapping->bs          = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the single allocation backing all rows */
    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i=1; i<mapping->info_nproc; i++) {
      PetscCall(PetscFree(mapping->info_indices[i]));
    }
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* discard the type-specific global-to-local data; it is rebuilt on demand */
  if (mapping->ops->destroy) PetscCall((*mapping->ops->destroy)(mapping));
  PetscFunctionReturn(0);
}
533 
534 /*@
535     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
536     ordering and a global parallel ordering.
537 
538     Not Collective
539 
540     Input Parameters:
541 .   mapping - mapping data structure
542 
543     Output Parameter:
544 .   bs - the blocksize
545 
546     Level: advanced
547 
548 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
549 @*/
550 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
551 {
552   PetscFunctionBegin;
553   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
554   *bs = mapping->bs;
555   PetscFunctionReturn(0);
556 }
557 
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes:
    There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

.seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
          `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,4);
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  PetscCall(ISInitializePackage());

  PetscCall(PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView));
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  if (mode == PETSC_COPY_VALUES) {
    /* private copy: the mapping allocates, copies, and later frees the indices */
    PetscCall(PetscMalloc1(n,&in));
    PetscCall(PetscArraycpy(in,indices,n));
    (*mapping)->indices = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; the mapping frees it on destroy */
    (*mapping)->indices = (PetscInt*)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
  } else if (mode == PETSC_USE_POINTER) {
    /* alias the caller's array; the caller keeps ownership and must outlive the mapping */
    (*mapping)->indices = (PetscInt*)indices;
  }
  else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
  PetscFunctionReturn(0);
}
616 
617 PetscFunctionList ISLocalToGlobalMappingList = NULL;
618 
/*@
   ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

   Not collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  PetscObjectOptionsBegin((PetscObject)mapping);
  /* present the current type (if any) as the default shown in -help output */
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg));
  if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping,type));
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
645 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: `ISLocalToGlobalMappingCreate()`
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* indices are owned (and freed) only for PETSC_COPY_VALUES / PETSC_OWN_POINTER modes */
  if ((*mapping)->dealloc_indices) {
    PetscCall(PetscFree((*mapping)->indices));
  }
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the single allocation backing all rows */
    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i=1; i<(*mapping)->info_nproc; i++) {
      PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    }
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) {
    PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  }
  PetscCall(PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei));
  /* release the type-specific (basic/hash) global-to-local data */
  if ((*mapping)->ops->destroy) {
    PetscCall((*(*mapping)->ops->destroy)(*mapping));
  }
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(0);
}
690 
691 /*@
692     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
693     a new index set using the global numbering defined in an ISLocalToGlobalMapping
694     context.
695 
696     Collective on is
697 
698     Input Parameters:
699 +   mapping - mapping between local and global numbering
700 -   is - index set in local numbering
701 
702     Output Parameters:
703 .   newis - index set in global numbering
704 
705     Notes:
706     The output IS will have the same communicator of the input IS.
707 
708     Level: advanced
709 
710 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
711           `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
712 @*/
713 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
714 {
715   PetscInt       n,*idxout;
716   const PetscInt *idxin;
717 
718   PetscFunctionBegin;
719   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
720   PetscValidHeaderSpecific(is,IS_CLASSID,2);
721   PetscValidPointer(newis,3);
722 
723   PetscCall(ISGetLocalSize(is,&n));
724   PetscCall(ISGetIndices(is,&idxin));
725   PetscCall(PetscMalloc1(n,&idxout));
726   PetscCall(ISLocalToGlobalMappingApply(mapping,n,idxin,idxout));
727   PetscCall(ISRestoreIndices(is,&idxin));
728   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis));
729   PetscFunctionReturn(0);
730 }
731 
/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Negative input indices are passed through unchanged.

   Level: advanced

.seealso: `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
          `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
          `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`

@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs   = mapping->bs;
  Nmax = bs*mapping->n; /* number of valid point-wise local indices */
  if (bs == 1) {
    /* fast path: block size 1, indices map directly */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i]; /* negative entries pass through unchanged */
        continue;
      }
      PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    /* general path: idx holds one entry per block; expand to point indices */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i]; /* negative entries pass through unchanged */
        continue;
      }
      PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}
787 
788 /*@
789    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
790 
791    Not collective
792 
793    Input Parameters:
794 +  mapping - the local to global mapping context
795 .  N - number of integers
796 -  in - input indices in local block numbering
797 
798    Output Parameter:
799 .  out - indices in global block numbering
800 
801    Notes:
802    The in and out array parameters may be identical.
803 
804    Example:
805      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
806      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
807 
808    Level: advanced
809 
810 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
811           `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
812           `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
813 
814 @*/
815 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
816 {
817   PetscInt       i,Nmax;
818   const PetscInt *idx;
819 
820   PetscFunctionBegin;
821   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
822   Nmax = mapping->n;
823   idx = mapping->indices;
824   for (i=0; i<N; i++) {
825     if (in[i] < 0) {
826       out[i] = in[i];
827       continue;
828     }
829     PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
830     out[i] = idx[in[i]];
831   }
832   PetscFunctionReturn(0);
833 }
834 
835 /*@
836     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
837     specified with a global numbering.
838 
839     Not collective
840 
841     Input Parameters:
842 +   mapping - mapping between local and global numbering
843 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
844            IS_GTOLM_DROP - drops the indices with no local value from the output list
845 .   n - number of global indices to map
846 -   idx - global indices to map
847 
848     Output Parameters:
849 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
850 -   idxout - local index of each global index, one must pass in an array long enough
851              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
852              idxout == NULL to determine the required length (returned in nout)
853              and then allocate the required space and call ISGlobalToLocalMappingApply()
854              a second time to set the values.
855 
856     Notes:
857     Either nout or idxout may be NULL. idx and idxout may be identical.
858 
859     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
860     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
861     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
862 
863     Level: advanced
864 
865     Developer Note: The manual page states that idx and idxout may be identical but the calling
866        sequence declares idx as const so it cannot be the same as idxout.
867 
868 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
869           `ISLocalToGlobalMappingDestroy()`
870 @*/
871 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
872 {
873   PetscFunctionBegin;
874   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
875   if (!mapping->data) {
876     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
877   }
878   PetscCall((*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout));
879   PetscFunctionReturn(0);
880 }
881 
882 /*@
883     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
884     a new index set using the local numbering defined in an ISLocalToGlobalMapping
885     context.
886 
887     Not collective
888 
889     Input Parameters:
890 +   mapping - mapping between local and global numbering
891 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
892            IS_GTOLM_DROP - drops the indices with no local value from the output list
893 -   is - index set in global numbering
894 
895     Output Parameters:
896 .   newis - index set in local numbering
897 
898     Notes:
899     The output IS will be sequential, as it encodes a purely local operation
900 
901     Level: advanced
902 
903 .seealso: `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
904           `ISLocalToGlobalMappingDestroy()`
905 @*/
906 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
907 {
908   PetscInt       n,nout,*idxout;
909   const PetscInt *idxin;
910 
911   PetscFunctionBegin;
912   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
913   PetscValidHeaderSpecific(is,IS_CLASSID,3);
914   PetscValidPointer(newis,4);
915 
916   PetscCall(ISGetLocalSize(is,&n));
917   PetscCall(ISGetIndices(is,&idxin));
918   if (type == IS_GTOLM_MASK) {
919     PetscCall(PetscMalloc1(n,&idxout));
920   } else {
921     PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL));
922     PetscCall(PetscMalloc1(nout,&idxout));
923   }
924   PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout));
925   PetscCall(ISRestoreIndices(is,&idxin));
926   PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis));
927   PetscFunctionReturn(0);
928 }
929 
930 /*@
931     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
932     specified with a block global numbering.
933 
934     Not collective
935 
936     Input Parameters:
937 +   mapping - mapping between local and global numbering
938 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
939            IS_GTOLM_DROP - drops the indices with no local value from the output list
940 .   n - number of global indices to map
941 -   idx - global indices to map
942 
943     Output Parameters:
944 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
945 -   idxout - local index of each global index, one must pass in an array long enough
946              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
947              idxout == NULL to determine the required length (returned in nout)
948              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
949              a second time to set the values.
950 
951     Notes:
952     Either nout or idxout may be NULL. idx and idxout may be identical.
953 
954     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
955     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
956     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
957 
958     Level: advanced
959 
960     Developer Note: The manual page states that idx and idxout may be identical but the calling
961        sequence declares idx as const so it cannot be the same as idxout.
962 
963 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
964           `ISLocalToGlobalMappingDestroy()`
965 @*/
966 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
967                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
968 {
969   PetscFunctionBegin;
970   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
971   if (!mapping->data) {
972     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
973   }
974   PetscCall((*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout));
975   PetscFunctionReturn(0);
976 }
977 
978 /*@C
979     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
980      each index shared by more than one processor
981 
982     Collective on ISLocalToGlobalMapping
983 
984     Input Parameter:
985 .   mapping - the mapping from local to global indexing
986 
987     Output Parameters:
988 +   nproc - number of processors that are connected to this one
989 .   proc - neighboring processors
990 .   numproc - number of indices for each subdomain (processor)
991 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
992 
993     Level: advanced
994 
995     Fortran Usage:
996 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
997 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
998           PetscInt indices[nproc][numprocmax],ierr)
999         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1000         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1001 
1002 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1003           `ISLocalToGlobalMappingRestoreInfo()`
1004 @*/
1005 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1006 {
1007   PetscFunctionBegin;
1008   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1009   if (mapping->info_cached) {
1010     *nproc    = mapping->info_nproc;
1011     *procs    = mapping->info_procs;
1012     *numprocs = mapping->info_numprocs;
1013     *indices  = mapping->info_indices;
1014   } else {
1015     PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices));
1016   }
1017   PetscFunctionReturn(0);
1018 }
1019 
/*
   Computes, for each neighboring rank, which local (block) indices are shared with it.

   Algorithm (three MPI rendezvous phases over a temporary block ownership of the
   global index range [0,Ng)):
     1. every rank sends each of its local indices to the rank that "globally owns"
        that index (ownership is a simple contiguous partition of size `scale`);
     2. each global owner counts how many ranks contributed each index and sends back,
        for every index contributed by more than one rank, the full list of sharers;
     3. the results are regrouped per neighboring rank, sorted by global number,
        rotated so this rank's own entry is first, and cached on the mapping.

   The caller receives ownership views of the cached arrays (freed at destroy time,
   see ISLocalToGlobalMappingRestoreBlockInfo()).
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  if (size == 1) {
    /* uniprocessor case: no neighbors; still allocate one-entry arrays so callers
       (and the restore routine) see a consistent shape */
    *nproc         = 0;
    *procs         = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain owner)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag3));

  /* Ng = 1 + global maximum index: extent of the range to partition among ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* contiguous ownership: rank r owns global indices [r*scale, (r+1)*scale); the
     last rank owns the (possibly shorter) remainder, clipped to at least 1 */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2*size,&nprocs));
  PetscCall(PetscArrayzero(nprocs,2*size));

  /* determine owners of each local node  */
  PetscCall(PetscMalloc1(n,&owner));
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscCall(PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm,nprocs,&nmax,&nrecvs));
  PetscCall(PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs));
  PetscCall(PetscMalloc1(nrecvs+1,&recv_waits));
  for (i=0; i<nrecvs; i++) {
    PetscCallMPI(MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i));
  }

  /* pack messages containing lists of local nodes to owners;
     each entry is the pair (global index, local position) */
  PetscCall(PetscMalloc1(2*n+1,&sends));
  PetscCall(PetscMalloc1(size+1,&starts));
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* the packing loop advanced starts[]; rebuild it for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends+1,&send_waits));
  PetscCall(PetscMalloc1(nsends+1,&dest));
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      PetscCallMPI(MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs+1,&source));
  PetscCall(PetscMalloc1(nrecvs+1,&len));
  cnt  = nrecvs;
  PetscCall(PetscCalloc1(ng+1,&nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) nownedm += nownedsenders[i];
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm+1,&ownedsenders));
  PetscCall(PetscMalloc1(ng+1,&starts));
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced while filling ownedsenders; rebuild it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart));
        for (j=0; j<nownedsenders[i]; j++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]));
        }
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends,&send_status));
    PetscCallMPI(MPI_Waitall(nsends,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2+1,&nprocs)); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt+1,&sends2));
  PetscCall(PetscMalloc1(nsends2+1,&starts2));

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2+1,&lens2));
  PetscCall(PetscMalloc1(nrecvs2+1,&starts3));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i));
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm));
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  PetscCall(PetscMalloc1(nt+1,&recvs2));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i));
  }

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2+1,&send_waits));
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i));
  }

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]));
        for (k=0; k<recvs2[cnt+1]; k++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]));
        }
        cnt += 2 + recvs2[cnt+1];
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  PetscCall(PetscCalloc1(size,&nprocs));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  PetscCall(PetscMalloc1(nt+1,procs));
  PetscCall(PetscMalloc1(nt+1,numprocs));
  PetscCall(PetscMalloc1(nt+1,indices));
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a rank to its compressed position in the output arrays */
  PetscCall(PetscMalloc1(size,&bprocs));
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i],&(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs,nt));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i],&tmp));
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]));
      for (j=0; j<(*numprocs)[i]; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]));
      }
      PetscCall(PetscSynchronizedPrintf(comm,"\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2,&send_status));
    PetscCallMPI(MPI_Waitall(nsends2,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1425 
1426 /*@C
1427     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1428 
1429     Collective on ISLocalToGlobalMapping
1430 
1431     Input Parameter:
1432 .   mapping - the mapping from local to global indexing
1433 
1434     Output Parameters:
1435 +   nproc - number of processors that are connected to this one
1436 .   proc - neighboring processors
1437 .   numproc - number of indices for each processor
1438 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1439 
1440     Level: advanced
1441 
1442 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1443           `ISLocalToGlobalMappingGetInfo()`
1444 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* info_free is set by ISLocalToGlobalMappingGetInfo() when bs > 1: in that case
     numprocs and indices are freshly expanded arrays owned by the caller and must be
     freed here; otherwise they alias the cache on the mapping (freed at destroy).
     procs always aliases the cache and is never freed here. */
  if (mapping->info_free) {
    PetscCall(PetscFree(*numprocs));
    if (*indices) {
      PetscInt i;

      PetscCall(PetscFree((*indices)[0]));
      for (i=1; i<*nproc; i++) {
        PetscCall(PetscFree((*indices)[i]));
      }
      PetscCall(PetscFree(*indices));
    }
  }
  /* null the caller's pointers so stale references cannot be used afterwards */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}
1467 
1468 /*@C
1469     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1470      each index shared by more than one processor
1471 
1472     Collective on ISLocalToGlobalMapping
1473 
1474     Input Parameter:
1475 .   mapping - the mapping from local to global indexing
1476 
1477     Output Parameters:
1478 +   nproc - number of processors that are connected to this one
1479 .   proc - neighboring processors
1480 .   numproc - number of indices for each subdomain (processor)
1481 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1482 
1483     Level: advanced
1484 
1485     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1486 
1487     Fortran Usage:
1488 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1489 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1490           PetscInt indices[nproc][numprocmax],ierr)
1491         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1492         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1493 
1494 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1495           `ISLocalToGlobalMappingRestoreInfo()`
1496 @*/
1497 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1498 {
1499   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs,i,j,k;
1500 
1501   PetscFunctionBegin;
1502   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1503   bs = mapping->bs;
1504   PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices));
1505   if (bs > 1) { /* we need to expand the cached info */
1506     PetscCall(PetscCalloc1(*nproc,&*indices));
1507     PetscCall(PetscCalloc1(*nproc,&*numprocs));
1508     for (i=0; i<*nproc; i++) {
1509       PetscCall(PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]));
1510       for (j=0; j<bnumprocs[i]; j++) {
1511         for (k=0; k<bs; k++) {
1512           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1513         }
1514       }
1515       (*numprocs)[i] = bnumprocs[i]*bs;
1516     }
1517     mapping->info_free = PETSC_TRUE;
1518   } else {
1519     *numprocs = bnumprocs;
1520     *indices  = bindices;
1521   }
1522   PetscFunctionReturn(0);
1523 }
1524 
1525 /*@C
1526     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1527 
1528     Collective on ISLocalToGlobalMapping
1529 
1530     Input Parameter:
1531 .   mapping - the mapping from local to global indexing
1532 
1533     Output Parameters:
1534 +   nproc - number of processors that are connected to this one
1535 .   proc - neighboring processors
1536 .   numproc - number of indices for each processor
1537 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1538 
1539     Level: advanced
1540 
1541 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1542           `ISLocalToGlobalMappingGetInfo()`
1543 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscFunctionBegin;
  /* The point info either aliases the cached block info (bs == 1) or was expanded by
     ISLocalToGlobalMappingGetInfo() (bs > 1, flagged via mapping->info_free); both
     cases are handled by the block restore routine. */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices));
  PetscFunctionReturn(0);
}
1550 
1551 /*@C
1552     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1553 
1554     Collective on ISLocalToGlobalMapping
1555 
1556     Input Parameter:
1557 .   mapping - the mapping from local to global indexing
1558 
1559     Output Parameters:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1561 .   count - number of neighboring processors per node
1562 -   indices - indices of processes sharing the node (sorted)
1563 
1564     Level: advanced
1565 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1567 
1568 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1569           `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
1570 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n; /* number of local points in the mapping */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping,&n));
  if (!mapping->info_nodec) {
    /* first call: build a CSR-like per-node table (info_nodec = counts, info_nodei =
       per-node rank lists) from the per-neighbor info and cache it on the mapping */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    PetscCall(PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
    /* counting pass: every node appears at least once (for this rank itself);
       neighbor entry 0 is this rank (GetBlockInfo puts self first), so start at i=1 */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all node lists; info_nodei[i] are offsets into it */
    if (n) PetscCall(PetscMalloc1(m,&mapping->info_nodei[0]));
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* filling pass: reuse info_nodec as the per-node write cursor */
    PetscCall(PetscArrayzero(mapping->info_nodec,n));
    /* NOTE(review): neigh[0] is read whenever n > 0; assumes n_neigh > 0 (neigh non-NULL)
       on any rank with local nodes -- TODO confirm for the size-1 communicator path */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (adjusts info_nodec in place) */
    for (i=0;i<n;i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
  }
  /* all outputs are optional views of the cached data; the mapping retains ownership */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1614 
1615 /*@C
1616     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1617 
1618     Collective on ISLocalToGlobalMapping
1619 
1620     Input Parameter:
1621 .   mapping - the mapping from local to global indexing
1622 
1623     Output Parameters:
1624 +   nnodes - number of local nodes
1625 .   count - number of neighboring processors per node
1626 -   indices - indices of processes sharing the node (sorted)
1627 
1628     Level: advanced
1629 
1630 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1631           `ISLocalToGlobalMappingGetInfo()`
1632 @*/
1633 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1634 {
1635   PetscFunctionBegin;
1636   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1637   if (nnodes)  *nnodes  = 0;
1638   if (count)   *count   = NULL;
1639   if (indices) *indices = NULL;
1640   PetscFunctionReturn(0);
1641 }
1642 
1643 /*@C
1644    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1645 
1646    Not Collective
1647 
1648    Input Parameter:
1649 . ltog - local to global mapping
1650 
1651    Output Parameter:
1652 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1653 
1654    Level: advanced
1655 
1656    Notes:
1657     ISLocalToGlobalMappingGetSize() returns the length the this array
1658 
1659 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1660 @*/
1661 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1662 {
1663   PetscFunctionBegin;
1664   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1665   PetscValidPointer(array,2);
1666   if (ltog->bs == 1) {
1667     *array = ltog->indices;
1668   } else {
1669     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1670     const PetscInt *ii;
1671 
1672     PetscCall(PetscMalloc1(bs*n,&jj));
1673     *array = jj;
1674     k    = 0;
1675     ii   = ltog->indices;
1676     for (i=0; i<n; i++)
1677       for (j=0; j<bs; j++)
1678         jj[k++] = bs*ii[i] + j;
1679   }
1680   PetscFunctionReturn(0);
1681 }
1682 
1683 /*@C
1684    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1685 
1686    Not Collective
1687 
1688    Input Parameters:
1689 + ltog - local to global mapping
1690 - array - array of indices
1691 
1692    Level: advanced
1693 
1694 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1695 @*/
1696 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1697 {
1698   PetscFunctionBegin;
1699   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1700   PetscValidPointer(array,2);
1701   PetscCheck(ltog->bs != 1 || *array == ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1702 
1703   if (ltog->bs > 1) {
1704     PetscCall(PetscFree(*(void**)array));
1705   }
1706   PetscFunctionReturn(0);
1707 }
1708 
1709 /*@C
1710    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1711 
1712    Not Collective
1713 
1714    Input Parameter:
1715 . ltog - local to global mapping
1716 
1717    Output Parameter:
1718 . array - array of indices
1719 
1720    Level: advanced
1721 
1722 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1723 @*/
1724 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1725 {
1726   PetscFunctionBegin;
1727   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1728   PetscValidPointer(array,2);
1729   *array = ltog->indices;
1730   PetscFunctionReturn(0);
1731 }
1732 
1733 /*@C
1734    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1735 
1736    Not Collective
1737 
1738    Input Parameters:
1739 + ltog - local to global mapping
1740 - array - array of indices
1741 
1742    Level: advanced
1743 
1744 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1745 @*/
1746 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1747 {
1748   PetscFunctionBegin;
1749   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1750   PetscValidPointer(array,2);
1751   PetscCheck(*array == ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1752   *array = NULL;
1753   PetscFunctionReturn(0);
1754 }
1755 
1756 /*@C
1757    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1758 
1759    Not Collective
1760 
1761    Input Parameters:
1762 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1763 . n - number of mappings to concatenate
1764 - ltogs - local to global mappings
1765 
1766    Output Parameter:
1767 . ltogcat - new mapping
1768 
1769    Note: this currently always returns a mapping with block size of 1
1770 
1771    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1772 
1773    Level: advanced
1774 
1775 .seealso: `ISLocalToGlobalMappingCreate()`
1776 @*/
1777 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1778 {
1779   PetscInt       i,cnt,m,*idx;
1780 
1781   PetscFunctionBegin;
1782   PetscCheck(n >= 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1783   if (n > 0) PetscValidPointer(ltogs,3);
1784   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1785   PetscValidPointer(ltogcat,4);
1786   for (cnt=0,i=0; i<n; i++) {
1787     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1788     cnt += m;
1789   }
1790   PetscCall(PetscMalloc1(cnt,&idx));
1791   for (cnt=0,i=0; i<n; i++) {
1792     const PetscInt *subidx;
1793     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1794     PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx));
1795     PetscCall(PetscArraycpy(&idx[cnt],subidx,m));
1796     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx));
1797     cnt += m;
1798   }
1799   PetscCall(ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat));
1800   PetscFunctionReturn(0);
1801 }
1802 
1803 /*MC
1804       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1805                                     used this is good for only small and moderate size problems.
1806 
1807    Options Database Keys:
1808 .   -islocaltoglobalmapping_type basic - select this method
1809 
1810    Level: beginner
1811 
1812 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1813 M*/
1814 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1815 {
1816   PetscFunctionBegin;
1817   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1818   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1819   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1820   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1821   PetscFunctionReturn(0);
1822 }
1823 
1824 /*MC
1825       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1826                                     used this is good for large memory problems.
1827 
1828    Options Database Keys:
1829 .   -islocaltoglobalmapping_type hash - select this method
1830 
1831    Notes:
1832     This is selected automatically for large problems if the user does not set the type.
1833 
1834    Level: beginner
1835 
1836 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1837 M*/
1838 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1839 {
1840   PetscFunctionBegin;
1841   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1842   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1843   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1844   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1845   PetscFunctionReturn(0);
1846 }
1847 
/*@C
    ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

   Not Collective

   Input Parameters:
+  sname - name of a new method
-  function - routine to create method context

   Notes:
   ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

   Sample usage:
.vb
   ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

   Then, your mapping can be chosen with the procedural interface via
$     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
   or at runtime via the option
$     -islocaltoglobalmapping_type my_mapper

   Level: advanced

.seealso: `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`

@*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscFunctionBegin;
  /* make sure the IS package (and hence the registry) is initialized before adding to it */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function));
  PetscFunctionReturn(0);
}
1882 
/*@C
   ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.

   Logically Collective on ISLocalToGlobalMapping

   Input Parameters:
+  ltog - the ISLocalToGlobalMapping object
-  type - a known method

   Options Database Key:
.  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
    of available methods (for instance, basic or hash)

   Notes:
   See "petsc/include/petscis.h" for available methods

  Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
  then set the ISLocalToGlobalMapping type from the options database rather than by using
  this routine.

  Level: intermediate

  Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
  are accessed by ISLocalToGlobalMappingSetType().

.seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
@*/
PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
{
  PetscBool      match;
  PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  if (type) PetscValidCharPointer(type,2);

  /* no-op when the object already has the requested type */
  PetscCall(PetscObjectTypeCompare((PetscObject)ltog,type,&match));
  if (match) PetscFunctionReturn(0);

  /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
  if (type) {
    PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r));
    PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
  }
  /* Destroy the previous private LTOG context */
  if (ltog->ops->destroy) {
    PetscCall((*ltog->ops->destroy)(ltog));
    ltog->ops->destroy = NULL;
  }
  /* NOTE(review): only ops->destroy is cleared above; the remaining op pointers are
     overwritten by the constructor below when r != NULL, but for type == NULL they
     keep their previous values -- confirm deferred setup always resets them */
  PetscCall(PetscObjectChangeTypeName((PetscObject)ltog,type));
  if (r) PetscCall((*r)(ltog));
  PetscFunctionReturn(0);
}
1936 
1937 /*@C
1938    ISLocalToGlobalMappingGetType - Get the type of the l2g map
1939 
1940    Not Collective
1941 
1942    Input Parameter:
1943 .  ltog - the ISLocalToGlobalMapping object
1944 
1945    Output Parameter:
1946 .  type - the type
1947 
1948    Level: intermediate
1949 
1950 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
1951 @*/
1952 PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
1953 {
1954   PetscFunctionBegin;
1955   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1956   PetscValidPointer(type,2);
1957   *type = ((PetscObject)ltog)->type_name;
1958   PetscFunctionReturn(0);
1959 }
1960 
/* guards against repeated (and re-entrant) registration; reset by the package's register-destroy path */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

/*@C
  ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

  Not Collective

  Level: advanced

.seealso: `ISRegister()`, `ISLocalToGlobalRegister()`
@*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscFunctionBegin;
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  /* set the flag before registering: ISLocalToGlobalMappingRegister() calls
     ISInitializePackage(), which may call back into this function -- the early
     return above then stops the recursion */
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
  PetscFunctionReturn(0);
}
1981