/* xref: /petsc/src/vec/is/utils/isltog.c (revision 8fb5bd83c3955fefcf33a54e3bb66920a9fa884b) */
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47 
48   PetscFunctionBeginHot;
49   *pStart = 0;
50   *points = NULL;
51   PetscCall(ISGetLocalSize(pointIS, &numCells));
52   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
53   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
54   *pEnd   = *pStart + numCells;
55   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
56   PetscFunctionReturn(0);
57 }
58 
59 /*@C
60   ISRestorePointRange - Destroys the traversal description
61 
62   Not collective
63 
64   Input Parameters:
65 + pointIS - The IS object
66 . pStart  - The first index, from ISGetPointRange()
67 . pEnd    - One past the last index, from ISGetPointRange()
68 - points  - The indices, from ISGetPointRange()
69 
70   Notes:
71   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
72 $ ISGetPointRange(is, &pStart, &pEnd, &points);
73 $ for (p = pStart; p < pEnd; ++p) {
74 $   const PetscInt point = points ? points[p] : p;
75 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
77 
78   Level: intermediate
79 
80 .seealso: `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
81 @*/
82 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
83 {
84   PetscInt       step = 1;
85   PetscBool      isStride;
86 
87   PetscFunctionBeginHot;
88   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
89   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
90   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
91   PetscFunctionReturn(0);
92 }
93 
94 /*@C
95   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
96 
97   Not collective
98 
99   Input Parameters:
100 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
102 . pEnd       - One past the last index for the subrange
103 - points     - The indices for the entire range, from ISGetPointRange()
104 
105   Output Parameters:
106 . subpointIS - The IS object now configured to be a subrange
107 
108   Notes:
109   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
110 
111   Level: intermediate
112 
113 .seealso: `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
114 @*/
115 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
116 {
117   PetscFunctionBeginHot;
118   if (points) {
119     PetscCall(ISSetType(subpointIS, ISGENERAL));
120     PetscCall(ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER));
121   } else {
122     PetscCall(ISSetType(subpointIS, ISSTRIDE));
123     PetscCall(ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1));
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /* -----------------------------------------------------------------------------------------*/
129 
130 /*
131     Creates the global mapping information in the ISLocalToGlobalMapping structure
132 
133     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
134 */
135 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
136 {
137   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
138 
139   PetscFunctionBegin;
140   if (mapping->data) PetscFunctionReturn(0);
141   end   = 0;
142   start = PETSC_MAX_INT;
143 
144   for (i=0; i<n; i++) {
145     if (idx[i] < 0) continue;
146     if (idx[i] < start) start = idx[i];
147     if (idx[i] > end)   end   = idx[i];
148   }
149   if (start > end) {start = 0; end = -1;}
150   mapping->globalstart = start;
151   mapping->globalend   = end;
152   if (!((PetscObject)mapping)->type_name) {
153     if ((end - start) > PetscMax(4*n,1000000)) {
154       PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH));
155     } else {
156       PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC));
157     }
158   }
159   if (mapping->ops->globaltolocalmappingsetup) PetscCall((*mapping->ops->globaltolocalmappingsetup)(mapping));
160   PetscFunctionReturn(0);
161 }
162 
163 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
164 {
165   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
166   ISLocalToGlobalMapping_Basic *map;
167 
168   PetscFunctionBegin;
169   start            = mapping->globalstart;
170   end              = mapping->globalend;
171   PetscCall(PetscNew(&map));
172   PetscCall(PetscMalloc1(end-start+2,&globals));
173   map->globals     = globals;
174   for (i=0; i<end-start+1; i++) globals[i] = -1;
175   for (i=0; i<n; i++) {
176     if (idx[i] < 0) continue;
177     globals[idx[i] - start] = i;
178   }
179   mapping->data = (void*)map;
180   PetscCall(PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt)));
181   PetscFunctionReturn(0);
182 }
183 
184 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
185 {
186   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
187   ISLocalToGlobalMapping_Hash *map;
188 
189   PetscFunctionBegin;
190   PetscCall(PetscNew(&map));
191   PetscCall(PetscHMapICreate(&map->globalht));
192   for (i=0; i<n; i++) {
193     if (idx[i] < 0) continue;
194     PetscCall(PetscHMapISet(map->globalht,idx[i],i));
195   }
196   mapping->data = (void*)map;
197   PetscCall(PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt)));
198   PetscFunctionReturn(0);
199 }
200 
201 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
202 {
203   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
204 
205   PetscFunctionBegin;
206   if (!map) PetscFunctionReturn(0);
207   PetscCall(PetscFree(map->globals));
208   PetscCall(PetscFree(mapping->data));
209   PetscFunctionReturn(0);
210 }
211 
212 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
213 {
214   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
215 
216   PetscFunctionBegin;
217   if (!map) PetscFunctionReturn(0);
218   PetscCall(PetscHMapIDestroy(&map->globalht));
219   PetscCall(PetscFree(mapping->data));
220   PetscFunctionReturn(0);
221 }
222 
/*
   Template instantiation of the ISGlobalToLocalMappingApply{Block}_<type>
   functions.  isltog.h is included four times; each inclusion stamps out one
   variant parameterized by the macros below (the header #undefs them after use):
     GTOLTYPE - suffix of the mapping data struct (ISLocalToGlobalMapping<GTOLTYPE>)
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size expression used by the generated code
     GTOL     - statement mapping one global index g to a local index
*/

/* Basic (dense table), per-point indexing: local = table[g/bs - start], then scale by bs */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Basic, block indexing: g is already a block index, bs fixed to 1 */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, per-point indexing; a miss leaves local at its sentinel value */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, block indexing */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
257 
258 /*@
259     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
260 
261     Not Collective
262 
263     Input Parameter:
264 .   ltog - local to global mapping
265 
266     Output Parameter:
267 .   nltog - the duplicated local to global mapping
268 
269     Level: advanced
270 
271 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
272 @*/
273 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
274 {
275   ISLocalToGlobalMappingType l2gtype;
276 
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
279   PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog));
280   PetscCall(ISLocalToGlobalMappingGetType(ltog,&l2gtype));
281   PetscCall(ISLocalToGlobalMappingSetType(*nltog,l2gtype));
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
287 
288     Not Collective
289 
290     Input Parameter:
291 .   ltog - local to global mapping
292 
293     Output Parameter:
294 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
295 
296     Level: advanced
297 
298 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
299 @*/
300 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
301 {
302   PetscFunctionBegin;
303   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
304   PetscValidIntPointer(n,2);
305   *n = mapping->bs*mapping->n;
306   PetscFunctionReturn(0);
307 }
308 
309 /*@C
310    ISLocalToGlobalMappingViewFromOptions - View from Options
311 
312    Collective on ISLocalToGlobalMapping
313 
314    Input Parameters:
315 +  A - the local to global mapping object
316 .  obj - Optional object
317 -  name - command line option
318 
319    Level: intermediate
320 .seealso: `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
321 @*/
PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
  /* Thin wrapper: delegate entirely to the generic PetscObject options-driven viewer */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
329 
330 /*@C
331     ISLocalToGlobalMappingView - View a local to global mapping
332 
333     Not Collective
334 
335     Input Parameters:
336 +   ltog - local to global mapping
337 -   viewer - viewer
338 
339     Level: advanced
340 
341 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
342 @*/
343 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
344 {
345   PetscInt       i;
346   PetscMPIInt    rank;
347   PetscBool      iascii;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
351   if (!viewer) {
352     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer));
353   }
354   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
355 
356   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank));
357   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
358   if (iascii) {
359     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer));
360     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
361     for (i=0; i<mapping->n; i++) {
362       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]));
363     }
364     PetscCall(PetscViewerFlush(viewer));
365     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
366   }
367   PetscFunctionReturn(0);
368 }
369 
370 /*@
371     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
372     ordering and a global parallel ordering.
373 
374     Not collective
375 
376     Input Parameter:
377 .   is - index set containing the global numbers for each local number
378 
379     Output Parameter:
380 .   mapping - new mapping data structure
381 
382     Notes:
383     the block size of the IS determines the block size of the mapping
384     Level: advanced
385 
386 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
387 @*/
388 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
389 {
390   PetscInt       n,bs;
391   const PetscInt *indices;
392   MPI_Comm       comm;
393   PetscBool      isblock;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(is,IS_CLASSID,1);
397   PetscValidPointer(mapping,2);
398 
399   PetscCall(PetscObjectGetComm((PetscObject)is,&comm));
400   PetscCall(ISGetLocalSize(is,&n));
401   PetscCall(PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock));
402   if (!isblock) {
403     PetscCall(ISGetIndices(is,&indices));
404     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping));
405     PetscCall(ISRestoreIndices(is,&indices));
406   } else {
407     PetscCall(ISGetBlockSize(is,&bs));
408     PetscCall(ISBlockGetIndices(is,&indices));
409     PetscCall(ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping));
410     PetscCall(ISBlockRestoreIndices(is,&indices));
411   }
412   PetscFunctionReturn(0);
413 }
414 
415 /*@C
416     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
417     ordering and a global parallel ordering.
418 
419     Collective
420 
421     Input Parameters:
422 +   sf - star forest mapping contiguous local indices to (rank, offset)
423 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
424 
425     Output Parameter:
426 .   mapping - new mapping data structure
427 
428     Level: advanced
429 
430     Notes:
431     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
432 
433 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
434 @*/
435 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
436 {
437   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
438   MPI_Comm       comm;
439 
440   PetscFunctionBegin;
441   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
442   PetscValidPointer(mapping,3);
443   PetscCall(PetscObjectGetComm((PetscObject)sf,&comm));
444   PetscCall(PetscSFGetGraph(sf,&nroots,&nleaves,NULL,NULL));
445   if (start == PETSC_DECIDE) {
446     start = 0;
447     PetscCallMPI(MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm));
448   } else PetscCheck(start >= 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
449   PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
450   ++maxlocal;
451   PetscCall(PetscMalloc1(nroots,&globals));
452   PetscCall(PetscMalloc1(maxlocal,&ltog));
453   for (i=0; i<nroots; i++) globals[i] = start + i;
454   for (i=0; i<maxlocal; i++) ltog[i] = -1;
455   PetscCall(PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
456   PetscCall(PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
457   PetscCall(ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping));
458   PetscCall(PetscFree(globals));
459   PetscFunctionReturn(0);
460 }
461 
462 /*@
463     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
464 
465     Not collective
466 
467     Input Parameters:
468 +   mapping - mapping data structure
469 -   bs - the blocksize
470 
471     Level: advanced
472 
473 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
474 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;    /* new block indices, one per new block */
  const PetscInt *oid;    /* expanded (point-wise) old indices */
  PetscInt       i,cn,on,obs,nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheck(bs >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs; /* number of blocks under the new block size */
  /* The total point count on*obs must tile evenly into blocks of size bs */
  PetscCheck((on*obs)%bs == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  PetscCall(PetscMalloc1(nn,&nid));
  /* GetIndices expands the block indices to point-wise global indices */
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping,&oid));
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Each new block must be either all-negative (unmapped) or bs consecutive
       global indices; cn counts the negative entries seen in block i */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheck(oid[i*bs+j] == oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* A partially negative block cannot be represented */
      PetscCheck(cn == bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs; /* global block index of the block's first point */
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping,&oid));

  /* Install the re-blocked indices; the old array is owned by the mapping */
  mapping->n           = nn;
  mapping->bs          = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i=1; i<mapping->info_nproc; i++) {
      PetscCall(PetscFree(mapping->info_indices[i]));
    }
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard type-specific global-to-local data; it is rebuilt lazily on demand */
  if (mapping->ops->destroy) PetscCall((*mapping->ops->destroy)(mapping));
  PetscFunctionReturn(0);
}
533 
534 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
537 
538     Not Collective
539 
540     Input Parameters:
541 .   mapping - mapping data structure
542 
543     Output Parameter:
544 .   bs - the blocksize
545 
546     Level: advanced
547 
548 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
549 @*/
550 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
551 {
552   PetscFunctionBegin;
553   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
554   *bs = mapping->bs;
555   PetscFunctionReturn(0);
556 }
557 
558 /*@
559     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
560     ordering and a global parallel ordering.
561 
562     Not Collective, but communicator may have more than one process
563 
564     Input Parameters:
565 +   comm - MPI communicator
566 .   bs - the block size
567 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
568 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
569 -   mode - see PetscCopyMode
570 
571     Output Parameter:
572 .   mapping - new mapping data structure
573 
574     Notes:
575     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
576 
577     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
578     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
579     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
580 
581     Level: advanced
582 
583 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
584           `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
585 @*/
586 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
587 {
588   PetscInt       *in;
589 
590   PetscFunctionBegin;
591   if (n) PetscValidIntPointer(indices,4);
592   PetscValidPointer(mapping,6);
593 
594   *mapping = NULL;
595   PetscCall(ISInitializePackage());
596 
597   PetscCall(PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView));
598   (*mapping)->n  = n;
599   (*mapping)->bs = bs;
600   if (mode == PETSC_COPY_VALUES) {
601     PetscCall(PetscMalloc1(n,&in));
602     PetscCall(PetscArraycpy(in,indices,n));
603     (*mapping)->indices = in;
604     (*mapping)->dealloc_indices = PETSC_TRUE;
605     PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
606   } else if (mode == PETSC_OWN_POINTER) {
607     (*mapping)->indices = (PetscInt*)indices;
608     (*mapping)->dealloc_indices = PETSC_TRUE;
609     PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
610   } else if (mode == PETSC_USE_POINTER) {
611     (*mapping)->indices = (PetscInt*)indices;
612   }
613   else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
614   PetscFunctionReturn(0);
615 }
616 
617 PetscFunctionList ISLocalToGlobalMappingList = NULL;
618 
619 /*@
620    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
621 
622    Not collective
623 
624    Input Parameters:
625 .  mapping - mapping data structure
626 
627    Level: advanced
628 
629 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Ensure the built-in types are registered before consulting the options DB */
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  /* PetscObjectOptionsBegin/PetscOptionsEnd bracket the options queries and
     declare hidden locals; keep them as a matched pair */
  PetscObjectOptionsBegin((PetscObject)mapping);
  /* Show the current type as the default when one is already set */
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg));
  if (flg) PetscCall(ISLocalToGlobalMappingSetType(mapping,type));
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
645 
646 /*@
647    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
648    ordering and a global parallel ordering.
649 
   Not Collective
651 
652    Input Parameters:
653 .  mapping - mapping data structure
654 
655    Level: advanced
656 
657 .seealso: `ISLocalToGlobalMappingCreate()`
658 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Drop one reference; only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* Indices are freed only if the mapping owns them (COPY_VALUES/OWN_POINTER) */
  if ((*mapping)->dealloc_indices) {
    PetscCall(PetscFree((*mapping)->indices));
  }
  /* Free the cached neighbor information (see ISLocalToGlobalMappingGetBlockInfo) */
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the single backing allocation for all rows;
       rows 1..nproc-1 may have their own allocations */
    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i=1; i<(*mapping)->info_nproc; i++) {
      PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    }
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) {
    PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  }
  PetscCall(PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei));
  /* Let the type-specific implementation (Basic/Hash) release its data */
  if ((*mapping)->ops->destroy) {
    PetscCall((*(*mapping)->ops->destroy)(*mapping));
  }
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(0);
}
690 
691 /*@
692     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
693     a new index set using the global numbering defined in an ISLocalToGlobalMapping
694     context.
695 
696     Collective on is
697 
698     Input Parameters:
699 +   mapping - mapping between local and global numbering
700 -   is - index set in local numbering
701 
702     Output Parameters:
703 .   newis - index set in global numbering
704 
705     Notes:
706     The output IS will have the same communicator of the input IS.
707 
708     Level: advanced
709 
710 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
711           `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
712 @*/
713 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
714 {
715   PetscInt       n,*idxout;
716   const PetscInt *idxin;
717 
718   PetscFunctionBegin;
719   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
720   PetscValidHeaderSpecific(is,IS_CLASSID,2);
721   PetscValidPointer(newis,3);
722 
723   PetscCall(ISGetLocalSize(is,&n));
724   PetscCall(ISGetIndices(is,&idxin));
725   PetscCall(PetscMalloc1(n,&idxout));
726   PetscCall(ISLocalToGlobalMappingApply(mapping,n,idxin,idxout));
727   PetscCall(ISRestoreIndices(is,&idxin));
728   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis));
729   PetscFunctionReturn(0);
730 }
731 
732 /*@
733    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
734    and converts them to the global numbering.
735 
736    Not collective
737 
738    Input Parameters:
739 +  mapping - the local to global mapping context
740 .  N - number of integers
741 -  in - input indices in local numbering
742 
743    Output Parameter:
744 .  out - indices in global numbering
745 
746    Notes:
747    The in and out array parameters may be identical.
748 
749    Level: advanced
750 
751 .seealso: `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
752           `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
753           `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
754 
755 @*/
756 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
757 {
758   PetscInt i,bs,Nmax;
759 
760   PetscFunctionBegin;
761   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
762   bs   = mapping->bs;
763   Nmax = bs*mapping->n;
764   if (bs == 1) {
765     const PetscInt *idx = mapping->indices;
766     for (i=0; i<N; i++) {
767       if (in[i] < 0) {
768         out[i] = in[i];
769         continue;
770       }
771       PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
772       out[i] = idx[in[i]];
773     }
774   } else {
775     const PetscInt *idx = mapping->indices;
776     for (i=0; i<N; i++) {
777       if (in[i] < 0) {
778         out[i] = in[i];
779         continue;
780       }
781       PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
782       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
783     }
784   }
785   PetscFunctionReturn(0);
786 }
787 
788 /*@
789    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
790 
791    Not collective
792 
793    Input Parameters:
794 +  mapping - the local to global mapping context
795 .  N - number of integers
796 -  in - input indices in local block numbering
797 
798    Output Parameter:
799 .  out - indices in global block numbering
800 
801    Notes:
802    The in and out array parameters may be identical.
803 
804    Example:
805      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
806      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
807 
808    Level: advanced
809 
810 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
811           `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
812           `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
813 
814 @*/
815 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
816 {
817   PetscInt       i,Nmax;
818   const PetscInt *idx;
819 
820   PetscFunctionBegin;
821   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
822   Nmax = mapping->n;
823   idx = mapping->indices;
824   for (i=0; i<N; i++) {
825     if (in[i] < 0) {
826       out[i] = in[i];
827       continue;
828     }
829     PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
830     out[i] = idx[in[i]];
831   }
832   PetscFunctionReturn(0);
833 }
834 
835 /*@
836     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
837     specified with a global numbering.
838 
839     Not collective
840 
841     Input Parameters:
842 +   mapping - mapping between local and global numbering
843 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
844            IS_GTOLM_DROP - drops the indices with no local value from the output list
845 .   n - number of global indices to map
846 -   idx - global indices to map
847 
848     Output Parameters:
849 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
850 -   idxout - local index of each global index, one must pass in an array long enough
851              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
852              idxout == NULL to determine the required length (returned in nout)
853              and then allocate the required space and call ISGlobalToLocalMappingApply()
854              a second time to set the values.
855 
856     Notes:
857     Either nout or idxout may be NULL. idx and idxout may be identical.
858 
859     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
860     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
861     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
862 
863     Level: advanced
864 
865     Developer Note: The manual page states that idx and idxout may be identical but the calling
866        sequence declares idx as const so it cannot be the same as idxout.
867 
868 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
869           `ISLocalToGlobalMappingDestroy()`
870 @*/
871 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
872 {
873   PetscFunctionBegin;
874   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
875   if (!mapping->data) {
876     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
877   }
878   PetscCall((*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout));
879   PetscFunctionReturn(0);
880 }
881 
882 /*@
883     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
884     a new index set using the local numbering defined in an ISLocalToGlobalMapping
885     context.
886 
887     Not collective
888 
889     Input Parameters:
890 +   mapping - mapping between local and global numbering
891 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
892            IS_GTOLM_DROP - drops the indices with no local value from the output list
893 -   is - index set in global numbering
894 
895     Output Parameters:
896 .   newis - index set in local numbering
897 
898     Notes:
899     The output IS will be sequential, as it encodes a purely local operation
900 
901     Level: advanced
902 
903 .seealso: `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
904           `ISLocalToGlobalMappingDestroy()`
905 @*/
906 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
907 {
908   PetscInt       n,nout,*idxout;
909   const PetscInt *idxin;
910 
911   PetscFunctionBegin;
912   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
913   PetscValidHeaderSpecific(is,IS_CLASSID,3);
914   PetscValidPointer(newis,4);
915 
916   PetscCall(ISGetLocalSize(is,&n));
917   PetscCall(ISGetIndices(is,&idxin));
918   if (type == IS_GTOLM_MASK) {
919     PetscCall(PetscMalloc1(n,&idxout));
920   } else {
921     PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL));
922     PetscCall(PetscMalloc1(nout,&idxout));
923   }
924   PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout));
925   PetscCall(ISRestoreIndices(is,&idxin));
926   PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis));
927   PetscFunctionReturn(0);
928 }
929 
930 /*@
931     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
932     specified with a block global numbering.
933 
934     Not collective
935 
936     Input Parameters:
937 +   mapping - mapping between local and global numbering
938 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
939            IS_GTOLM_DROP - drops the indices with no local value from the output list
940 .   n - number of global indices to map
941 -   idx - global indices to map
942 
943     Output Parameters:
944 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
945 -   idxout - local index of each global index, one must pass in an array long enough
946              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
947              idxout == NULL to determine the required length (returned in nout)
948              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
949              a second time to set the values.
950 
951     Notes:
952     Either nout or idxout may be NULL. idx and idxout may be identical.
953 
954     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
955     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
956     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
957 
958     Level: advanced
959 
960     Developer Note: The manual page states that idx and idxout may be identical but the calling
961        sequence declares idx as const so it cannot be the same as idxout.
962 
963 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
964           `ISLocalToGlobalMappingDestroy()`
965 @*/
966 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
967                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
968 {
969   PetscFunctionBegin;
970   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
971   if (!mapping->data) {
972     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
973   }
974   PetscCall((*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout));
975   PetscFunctionReturn(0);
976 }
977 
978 /*@C
979     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
980      each index shared by more than one processor
981 
982     Collective on ISLocalToGlobalMapping
983 
984     Input Parameter:
985 .   mapping - the mapping from local to global indexing
986 
987     Output Parameters:
988 +   nproc - number of processors that are connected to this one
989 .   proc - neighboring processors
990 .   numproc - number of indices for each subdomain (processor)
991 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
992 
993     Level: advanced
994 
995     Fortran Usage:
996 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
997 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
998           PetscInt indices[nproc][numprocmax],ierr)
999         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1000         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1001 
1002 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1003           `ISLocalToGlobalMappingRestoreInfo()`
1004 @*/
1005 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1006 {
1007   PetscFunctionBegin;
1008   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1009   if (mapping->info_cached) {
1010     *nproc    = mapping->info_nproc;
1011     *procs    = mapping->info_procs;
1012     *numprocs = mapping->info_numprocs;
1013     *indices  = mapping->info_indices;
1014   } else {
1015     PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices));
1016   }
1017   PetscFunctionReturn(0);
1018 }
1019 
/*
   Computes, for the block indices of the mapping, the list of neighboring MPI
   ranks and the (local block) indices shared with each of them, then caches the
   result on the mapping.

   Algorithm: each global index is assigned a "global owner" rank by splitting
   the global index range [0,Ng) into equal chunks of size scale = Ng/size + 1.
   Three message rounds follow:
     tag1: each rank sends the owners the (global index, local index) pairs it holds
     tag2: the owners reply with the length of the shared-index message to come
     tag3: the owners reply, for each index held by more than one rank, with the
           local index, the number of sharing ranks, and the list of those ranks
   Finally the per-neighbor index lists are assembled, sorted by global number,
   and this rank's own entry is moved to position 0 of the output arrays.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry arrays so callers may read entry [0] */
    *nproc         = 0;
    *procs         = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag3));

  /* Ng = 1 + largest global index on any rank (global problem size for ownership split) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* equal-chunk ownership: rank r owns global indices [scale*r, scale*(r+1)); last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2*size,&nprocs));
  PetscCall(PetscArrayzero(nprocs,2*size));

  /* determine owners of each local node  */
  PetscCall(PetscMalloc1(n,&owner));
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscCall(PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm,nprocs,&nmax,&nrecvs));
  PetscCall(PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs));
  PetscCall(PetscMalloc1(nrecvs+1,&recv_waits));
  for (i=0; i<nrecvs; i++) {
    PetscCallMPI(MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i));
  }

  /* pack messages containing lists of local nodes to owners; entries alternate (global index, local index) */
  PetscCall(PetscMalloc1(2*n+1,&sends));
  PetscCall(PetscMalloc1(size+1,&starts));
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* the packing loop advanced starts[]; rebuild the offsets before sending */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends+1,&send_waits));
  PetscCall(PetscMalloc1(nsends+1,&dest));
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      PetscCallMPI(MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs+1,&source));
  PetscCall(PetscMalloc1(nrecvs+1,&len));
  cnt  = nrecvs;
  PetscCall(PetscCalloc1(ng+1,&nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* each entry is a (global,local) pair */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm+1,&ownedsenders));
  PetscCall(PetscMalloc1(ng+1,&starts));
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the listing loop above advanced starts[]; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart));
        for (j=0; j<nownedsenders[i]; j++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]));
        }
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends,&send_status));
    PetscCallMPI(MPI_Waitall(nsends,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2+1,&nprocs)); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt+1,&sends2));
  PetscCall(PetscMalloc1(nsends2+1,&starts2));

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths (round 2, tag2): reply traffic reverses round 1's direction */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2+1,&lens2));
  PetscCall(PetscMalloc1(nrecvs2+1,&starts3));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i));
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm));
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  /* starts3 = displacements of each incoming round-3 message; nt = total length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  PetscCall(PetscMalloc1(nt+1,&recvs2));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i));
  }

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2+1,&send_waits));
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i));
  }

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]));
        for (k=0; k<recvs2[cnt+1]; k++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]));
        }
        cnt += 2 + recvs2[cnt+1];
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node; nprocs is reused here as a per-rank counter */
  PetscCall(PetscCalloc1(size,&nprocs));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  PetscCall(PetscMalloc1(nt+1,procs));
  PetscCall(PetscMalloc1(nt+1,numprocs));
  PetscCall(PetscMalloc1(nt+1,indices));
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  PetscCall(PetscMalloc1(size,&bprocs)); /* bprocs[r] = position of rank r in the compressed *procs list */
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i],&(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs,nt));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i],&tmp));
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]));
      for (j=0; j<(*numprocs)[i]; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]));
      }
      PetscCall(PetscSynchronizedPrintf(comm,"\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2,&send_status));
    PetscCallMPI(MPI_Waitall(nsends2,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1426 
1427 /*@C
1428     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1429 
1430     Collective on ISLocalToGlobalMapping
1431 
1432     Input Parameter:
1433 .   mapping - the mapping from local to global indexing
1434 
1435     Output Parameters:
1436 +   nproc - number of processors that are connected to this one
1437 .   proc - neighboring processors
1438 .   numproc - number of indices for each processor
1439 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1440 
1441     Level: advanced
1442 
1443 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1444           `ISLocalToGlobalMappingGetInfo()`
1445 @*/
1446 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1447 {
1448   PetscFunctionBegin;
1449   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1450   if (mapping->info_free) {
1451     PetscCall(PetscFree(*numprocs));
1452     if (*indices) {
1453       PetscInt i;
1454 
1455       PetscCall(PetscFree((*indices)[0]));
1456       for (i=1; i<*nproc; i++) {
1457         PetscCall(PetscFree((*indices)[i]));
1458       }
1459       PetscCall(PetscFree(*indices));
1460     }
1461   }
1462   *nproc    = 0;
1463   *procs    = NULL;
1464   *numprocs = NULL;
1465   *indices  = NULL;
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 /*@C
1470     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1471      each index shared by more than one processor
1472 
1473     Collective on ISLocalToGlobalMapping
1474 
1475     Input Parameter:
1476 .   mapping - the mapping from local to global indexing
1477 
1478     Output Parameters:
1479 +   nproc - number of processors that are connected to this one
1480 .   proc - neighboring processors
1481 .   numproc - number of indices for each subdomain (processor)
1482 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1483 
1484     Level: advanced
1485 
1486     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1487 
1488     Fortran Usage:
1489 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1490 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1491           PetscInt indices[nproc][numprocmax],ierr)
1492         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1493         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1494 
1495 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1496           `ISLocalToGlobalMappingRestoreInfo()`
1497 @*/
1498 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1499 {
1500   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs,i,j,k;
1501 
1502   PetscFunctionBegin;
1503   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1504   bs = mapping->bs;
1505   PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices));
1506   if (bs > 1) { /* we need to expand the cached info */
1507     PetscCall(PetscCalloc1(*nproc,&*indices));
1508     PetscCall(PetscCalloc1(*nproc,&*numprocs));
1509     for (i=0; i<*nproc; i++) {
1510       PetscCall(PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]));
1511       for (j=0; j<bnumprocs[i]; j++) {
1512         for (k=0; k<bs; k++) {
1513           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1514         }
1515       }
1516       (*numprocs)[i] = bnumprocs[i]*bs;
1517     }
1518     mapping->info_free = PETSC_TRUE;
1519   } else {
1520     *numprocs = bnumprocs;
1521     *indices  = bindices;
1522   }
1523   PetscFunctionReturn(0);
1524 }
1525 
1526 /*@C
1527     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1528 
1529     Collective on ISLocalToGlobalMapping
1530 
1531     Input Parameter:
1532 .   mapping - the mapping from local to global indexing
1533 
1534     Output Parameters:
1535 +   nproc - number of processors that are connected to this one
1536 .   proc - neighboring processors
1537 .   numproc - number of indices for each processor
1538 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1539 
1540     Level: advanced
1541 
1542 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1543           `ISLocalToGlobalMappingGetInfo()`
1544 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscFunctionBegin;
  /* The point-index info is either the cached block arrays (bs == 1) or expanded
     copies flagged via mapping->info_free (bs > 1); the block Restore routine
     handles both cases and nulls the caller's pointers, so simply delegate. */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices));
  PetscFunctionReturn(0);
}
1551 
1552 /*@C
1553     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1554 
1555     Collective on ISLocalToGlobalMapping
1556 
1557     Input Parameter:
1558 .   mapping - the mapping from local to global indexing
1559 
1560     Output Parameters:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1562 .   count - number of neighboring processors per node
1563 -   indices - indices of processes sharing the node (sorted)
1564 
1565     Level: advanced
1566 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1568 
1569 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1570           `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
1571 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n; /* number of local (point) indices in the mapping */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping,&n));
  if (!mapping->info_nodec) {
    /* first call: build the per-node neighbor lists from the per-process info and cache them */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec has n+1 entries; info_nodei[i] are pointers into one shared buffer allocated below */
    PetscCall(PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
    /* every node is held at least by this rank itself */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    /* entry 0 of the info arrays is this rank itself, so start tallying neighbors at i = 1 */
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    if (n) PetscCall(PetscMalloc1(m,&mapping->info_nodei[0]));
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* reuse info_nodec as a fill cursor while populating the lists */
    PetscCall(PetscArrayzero(mapping->info_nodec,n));
    /* NOTE(review): neigh[0] is dereferenced whenever n > 0; if GetInfo can return
       n_neigh == 0 with neigh == NULL (the one-process path sets nproc to 0) this
       would read through a NULL pointer -- TODO confirm against the callers */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates; updates info_nodec[i] in place */
    for (i=0;i<n;i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1615 
1616 /*@C
1617     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1618 
1619     Collective on ISLocalToGlobalMapping
1620 
1621     Input Parameter:
1622 .   mapping - the mapping from local to global indexing
1623 
1624     Output Parameters:
1625 +   nnodes - number of local nodes
1626 .   count - number of neighboring processors per node
1627 -   indices - indices of processes sharing the node (sorted)
1628 
1629     Level: advanced
1630 
1631 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1632           `ISLocalToGlobalMappingGetInfo()`
1633 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Only invalidates the caller's views; the arrays themselves remain cached
     on the mapping (presumably freed when the mapping is destroyed -- confirm) */
  if (nnodes)  *nnodes  = 0;
  if (count)   *count   = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}
1643 
1644 /*@C
1645    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1646 
1647    Not Collective
1648 
1649    Input Parameter:
1650 . ltog - local to global mapping
1651 
1652    Output Parameter:
1653 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1654 
1655    Level: advanced
1656 
1657    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1659 
1660 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1661 @*/
1662 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1663 {
1664   PetscFunctionBegin;
1665   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1666   PetscValidPointer(array,2);
1667   if (ltog->bs == 1) {
1668     *array = ltog->indices;
1669   } else {
1670     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1671     const PetscInt *ii;
1672 
1673     PetscCall(PetscMalloc1(bs*n,&jj));
1674     *array = jj;
1675     k    = 0;
1676     ii   = ltog->indices;
1677     for (i=0; i<n; i++)
1678       for (j=0; j<bs; j++)
1679         jj[k++] = bs*ii[i] + j;
1680   }
1681   PetscFunctionReturn(0);
1682 }
1683 
1684 /*@C
1685    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1686 
1687    Not Collective
1688 
1689    Input Parameters:
1690 + ltog - local to global mapping
1691 - array - array of indices
1692 
1693    Level: advanced
1694 
1695 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1696 @*/
1697 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1698 {
1699   PetscFunctionBegin;
1700   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1701   PetscValidPointer(array,2);
1702   PetscCheck(ltog->bs != 1 || *array == ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1703 
1704   if (ltog->bs > 1) {
1705     PetscCall(PetscFree(*(void**)array));
1706   }
1707   PetscFunctionReturn(0);
1708 }
1709 
1710 /*@C
1711    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1712 
1713    Not Collective
1714 
1715    Input Parameter:
1716 . ltog - local to global mapping
1717 
1718    Output Parameter:
1719 . array - array of indices
1720 
1721    Level: advanced
1722 
1723 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1724 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Block indices are stored directly; no copy is made, so the caller must
     return this exact pointer via ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1733 
1734 /*@C
1735    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1736 
1737    Not Collective
1738 
1739    Input Parameters:
1740 + ltog - local to global mapping
1741 - array - array of indices
1742 
1743    Level: advanced
1744 
1745 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1746 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* The caller borrowed the internal array; verify the same pointer comes back,
     then null it out so it cannot be used after restore */
  PetscCheck(*array == ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}
1756 
1757 /*@C
1758    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1759 
1760    Not Collective
1761 
1762    Input Parameters:
1763 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1764 . n - number of mappings to concatenate
1765 - ltogs - local to global mappings
1766 
1767    Output Parameter:
1768 . ltogcat - new mapping
1769 
1770    Note: this currently always returns a mapping with block size of 1
1771 
1772    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1773 
1774    Level: advanced
1775 
1776 .seealso: `ISLocalToGlobalMappingCreate()`
1777 @*/
1778 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1779 {
1780   PetscInt       i,cnt,m,*idx;
1781 
1782   PetscFunctionBegin;
1783   PetscCheck(n >= 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1784   if (n > 0) PetscValidPointer(ltogs,3);
1785   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1786   PetscValidPointer(ltogcat,4);
1787   for (cnt=0,i=0; i<n; i++) {
1788     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1789     cnt += m;
1790   }
1791   PetscCall(PetscMalloc1(cnt,&idx));
1792   for (cnt=0,i=0; i<n; i++) {
1793     const PetscInt *subidx;
1794     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1795     PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx));
1796     PetscCall(PetscArraycpy(&idx[cnt],subidx,m));
1797     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx));
1798     cnt += m;
1799   }
1800   PetscCall(ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat));
1801   PetscFunctionReturn(0);
1802 }
1803 
1804 /*MC
1805       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1806                                     used this is good for only small and moderate size problems.
1807 
1808    Options Database Keys:
1809 .   -islocaltoglobalmapping_type basic - select this method
1810 
1811    Level: beginner
1812 
1813 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1814 M*/
1815 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1816 {
1817   PetscFunctionBegin;
1818   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1819   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1820   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1821   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1822   PetscFunctionReturn(0);
1823 }
1824 
1825 /*MC
1826       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1827                                     used this is good for large memory problems.
1828 
1829    Options Database Keys:
1830 .   -islocaltoglobalmapping_type hash - select this method
1831 
1832    Notes:
1833     This is selected automatically for large problems if the user does not set the type.
1834 
1835    Level: beginner
1836 
1837 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1838 M*/
1839 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1840 {
1841   PetscFunctionBegin;
1842   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1843   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1844   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1845   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1846   PetscFunctionReturn(0);
1847 }
1848 
1849 /*@C
1850     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1851 
1852    Not Collective
1853 
1854    Input Parameters:
1855 +  sname - name of a new method
1856 -  routine_create - routine to create method context
1857 
1858    Notes:
1859    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1860 
1861    Sample usage:
1862 .vb
1863    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1864 .ve
1865 
1866    Then, your mapping can be chosen with the procedural interface via
1867 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1868    or at runtime via the option
1869 $     -islocaltoglobalmapping_type my_mapper
1870 
1871    Level: advanced
1872 
1873 .seealso: `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
1874 
1875 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscFunctionBegin;
  /* Ensure the IS package (and its function lists) is initialized before registering */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function));
  PetscFunctionReturn(0);
}
1883 
1884 /*@C
1885    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1886 
1887    Logically Collective on ISLocalToGlobalMapping
1888 
1889    Input Parameters:
1890 +  ltog - the ISLocalToGlobalMapping object
1891 -  type - a known method
1892 
1893    Options Database Key:
1894 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1895     of available methods (for instance, basic or hash)
1896 
1897    Notes:
1898    See "petsc/include/petscis.h" for available methods
1899 
1900   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1901   then set the ISLocalToGlobalMapping type from the options database rather than by using
1902   this routine.
1903 
1904   Level: intermediate
1905 
1906   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1907   are accessed by ISLocalToGlobalMappingSetType().
1908 
1909 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
1910 @*/
1911 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1912 {
1913   PetscBool      match;
1914   PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;
1915 
1916   PetscFunctionBegin;
1917   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1918   if (type) PetscValidCharPointer(type,2);
1919 
1920   PetscCall(PetscObjectTypeCompare((PetscObject)ltog,type,&match));
1921   if (match) PetscFunctionReturn(0);
1922 
1923   /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1924   if (type) {
1925     PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r));
1926     PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1927   }
1928   /* Destroy the previous private LTOG context */
1929   if (ltog->ops->destroy) {
1930     PetscCall((*ltog->ops->destroy)(ltog));
1931     ltog->ops->destroy = NULL;
1932   }
1933   PetscCall(PetscObjectChangeTypeName((PetscObject)ltog,type));
1934   if (r) PetscCall((*r)(ltog));
1935   PetscFunctionReturn(0);
1936 }
1937 
1938 /*@C
1939    ISLocalToGlobalMappingGetType - Get the type of the l2g map
1940 
1941    Not Collective
1942 
1943    Input Parameter:
1944 .  ltog - the ISLocalToGlobalMapping object
1945 
1946    Output Parameter:
1947 .  type - the type
1948 
1949    Level: intermediate
1950 
1951 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
1952 @*/
PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(type,2);
  /* May be NULL if the type has not been set yet (type setup is deferred) */
  *type = ((PetscObject)ltog)->type_name;
  PetscFunctionReturn(0);
}
1961 
1962 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1963 
1964 /*@C
1965   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1966 
1967   Not Collective
1968 
1969   Level: advanced
1970 
1971 .seealso: `ISRegister()`, `ISLocalToGlobalRegister()`
1972 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscFunctionBegin;
  /* Register the built-in implementations exactly once */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
  PetscFunctionReturn(0);
}
1982