xref: /petsc/src/vec/is/utils/isltog.c (revision 6aad120caa16b1027d343a5f30f73d01448e4dc0)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
37 $ ISRestorePointRange(is, &pstart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47 
48   PetscFunctionBeginHot;
49   *pStart = 0;
50   *points = NULL;
51   PetscCall(ISGetLocalSize(pointIS, &numCells));
52   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
53   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
54   *pEnd   = *pStart + numCells;
55   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
56   PetscFunctionReturn(0);
57 }
58 
59 /*@C
60   ISRestorePointRange - Destroys the traversal description
61 
62   Not collective
63 
64   Input Parameters:
65 + pointIS - The IS object
66 . pStart  - The first index, from ISGetPointRange()
67 . pEnd    - One past the last index, from ISGetPointRange()
68 - points  - The indices, from ISGetPointRange()
69 
70   Notes:
71   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
72 $ ISGetPointRange(is, &pStart, &pEnd, &points);
73 $ for (p = pStart; p < pEnd; ++p) {
74 $   const PetscInt point = points ? points[p] : p;
75 $ }
76 $ ISRestorePointRange(is, &pstart, &pEnd, &points);
77 
78   Level: intermediate
79 
80 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
81 @*/
82 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
83 {
84   PetscInt       step = 1;
85   PetscBool      isStride;
86 
87   PetscFunctionBeginHot;
88   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
89   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
90   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
91   PetscFunctionReturn(0);
92 }
93 
94 /*@C
95   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
96 
97   Not collective
98 
99   Input Parameters:
100 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
102 . pEnd       - One past the last index for the subrange
103 - points     - The indices for the entire range, from ISGetPointRange()
104 
105   Output Parameters:
106 . subpointIS - The IS object now configured to be a subrange
107 
108   Notes:
109   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
110 
111   Level: intermediate
112 
113 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
114 @*/
115 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
116 {
117   PetscFunctionBeginHot;
118   if (points) {
119     PetscCall(ISSetType(subpointIS, ISGENERAL));
120     PetscCall(ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER));
121   } else {
122     PetscCall(ISSetType(subpointIS, ISSTRIDE));
123     PetscCall(ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1));
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /* -----------------------------------------------------------------------------------------*/
129 
130 /*
131     Creates the global mapping information in the ISLocalToGlobalMapping structure
132 
133     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
134 */
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

  PetscFunctionBegin;
  /* Type-specific data already built: nothing to do */
  if (mapping->data) PetscFunctionReturn(0);
  end   = 0;
  start = PETSC_MAX_INT;

  /* Determine the min/max global index; negative entries mean "no global value" and are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* No valid indices at all: normalize to the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;
  /* If the user has not chosen a type, pick HASH when the global index range is much
     larger than the local size (a BASIC dense table of size end-start would waste memory) */
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4*n,1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  /* Delegate the actual global->local data-structure construction to the chosen type */
  if (mapping->ops->globaltolocalmappingsetup) PetscCall((*mapping->ops->globaltolocalmappingsetup)(mapping));
  PetscFunctionReturn(0);
}
162 
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  PetscCall(PetscNew(&map));
  /* Dense lookup table covering the whole global range [start,end]; one slot beyond
     end-start+1 is allocated (NOTE(review): presumably so the allocation is nonempty
     for the empty range start=0,end=-1 — confirm) */
  PetscCall(PetscMalloc1(end-start+2,&globals));
  map->globals     = globals;
  /* -1 marks a global index with no local value */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    /* On duplicate global indices the last local index wins */
    globals[idx[i] - start] = i;
  }
  mapping->data = (void*)map;
  PetscCall(PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt)));
  PetscFunctionReturn(0);
}
183 
184 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
185 {
186   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
187   ISLocalToGlobalMapping_Hash *map;
188 
189   PetscFunctionBegin;
190   PetscCall(PetscNew(&map));
191   PetscCall(PetscHMapICreate(&map->globalht));
192   for (i=0; i<n; i++) {
193     if (idx[i] < 0) continue;
194     PetscCall(PetscHMapISet(map->globalht,idx[i],i));
195   }
196   mapping->data = (void*)map;
197   PetscCall(PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt)));
198   PetscFunctionReturn(0);
199 }
200 
201 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
202 {
203   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
204 
205   PetscFunctionBegin;
206   if (!map) PetscFunctionReturn(0);
207   PetscCall(PetscFree(map->globals));
208   PetscCall(PetscFree(mapping->data));
209   PetscFunctionReturn(0);
210 }
211 
212 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
213 {
214   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
215 
216   PetscFunctionBegin;
217   if (!map) PetscFunctionReturn(0);
218   PetscCall(PetscHMapIDestroy(&map->globalht));
219   PetscCall(PetscFree(mapping->data));
220   PetscFunctionReturn(0);
221 }
222 
/* Instantiate the four ISGlobalToLocalMappingApply* variants by repeatedly
   including the template isltog.h.  Each inclusion consumes (and undefines)
   GTOLTYPE (the backing data-structure suffix), GTOLNAME (the generated
   function-name suffix), GTOLBS (the block-size expression) and GTOL (the
   macro performing a single global->local lookup). */

/* Basic (dense table) lookup, scalar indices: block result scaled by bs */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Basic (dense table) lookup, block indices: no scaling */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash-table lookup, scalar indices (PetscHMapIGet leaves local = -1 on miss) */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash-table lookup, block indices */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
257 
258 /*@
259     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
260 
261     Not Collective
262 
263     Input Parameter:
264 .   ltog - local to global mapping
265 
266     Output Parameter:
267 .   nltog - the duplicated local to global mapping
268 
269     Level: advanced
270 
271 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
272 @*/
273 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
274 {
275   ISLocalToGlobalMappingType l2gtype;
276 
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
279   PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog));
280   PetscCall(ISLocalToGlobalMappingGetType(ltog,&l2gtype));
281   PetscCall(ISLocalToGlobalMappingSetType(*nltog,l2gtype));
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
287 
288     Not Collective
289 
290     Input Parameter:
291 .   ltog - local to global mapping
292 
293     Output Parameter:
294 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
295 
296     Level: advanced
297 
298 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
299 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks; the unblocked local size is bs * n */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
308 
309 /*@C
310    ISLocalToGlobalMappingViewFromOptions - View from Options
311 
312    Collective on ISLocalToGlobalMapping
313 
314    Input Parameters:
315 +  A - the local to global mapping object
316 .  obj - Optional object
317 -  name - command line option
318 
319    Level: intermediate
320 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
321 @*/
PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
  /* Thin wrapper: delegates viewer selection and invocation to the generic helper */
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
329 
330 /*@C
331     ISLocalToGlobalMappingView - View a local to global mapping
332 
333     Not Collective
334 
335     Input Parameters:
336 +   ltog - local to global mapping
337 -   viewer - viewer
338 
339     Level: advanced
340 
341 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
342 @*/
343 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
344 {
345   PetscInt       i;
346   PetscMPIInt    rank;
347   PetscBool      iascii;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
351   if (!viewer) {
352     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer));
353   }
354   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
355 
356   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank));
357   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
358   if (iascii) {
359     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer));
360     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
361     for (i=0; i<mapping->n; i++) {
362       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]));
363     }
364     PetscCall(PetscViewerFlush(viewer));
365     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
366   }
367   PetscFunctionReturn(0);
368 }
369 
370 /*@
371     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
372     ordering and a global parallel ordering.
373 
374     Not collective
375 
376     Input Parameter:
377 .   is - index set containing the global numbers for each local number
378 
379     Output Parameter:
380 .   mapping - new mapping data structure
381 
382     Notes:
383     the block size of the IS determines the block size of the mapping
384     Level: advanced
385 
386 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
387 @*/
388 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
389 {
390   PetscInt       n,bs;
391   const PetscInt *indices;
392   MPI_Comm       comm;
393   PetscBool      isblock;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(is,IS_CLASSID,1);
397   PetscValidPointer(mapping,2);
398 
399   PetscCall(PetscObjectGetComm((PetscObject)is,&comm));
400   PetscCall(ISGetLocalSize(is,&n));
401   PetscCall(PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock));
402   if (!isblock) {
403     PetscCall(ISGetIndices(is,&indices));
404     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping));
405     PetscCall(ISRestoreIndices(is,&indices));
406   } else {
407     PetscCall(ISGetBlockSize(is,&bs));
408     PetscCall(ISBlockGetIndices(is,&indices));
409     PetscCall(ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping));
410     PetscCall(ISBlockRestoreIndices(is,&indices));
411   }
412   PetscFunctionReturn(0);
413 }
414 
415 /*@C
416     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
417     ordering and a global parallel ordering.
418 
419     Collective
420 
421     Input Parameters:
422 +   sf - star forest mapping contiguous local indices to (rank, offset)
423 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
424 
425     Output Parameter:
426 .   mapping - new mapping data structure
427 
428     Level: advanced
429 
430     Notes:
431     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
432 
433 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
434 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  PetscCall(PetscObjectGetComm((PetscObject)sf,&comm));
  PetscCall(PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL));
  if (start == PETSC_DECIDE) {
    /* Contiguous global numbering: this rank's first index is the sum of root
       counts on lower ranks (collective; all ranks must pass PETSC_DECIDE together) */
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm));
  } else PetscCheckFalse(start < 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  /* Local space size: largest leaf location + 1, or nleaves when leaves are contiguous (ilocal == NULL) */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  PetscCall(PetscMalloc1(nroots,&globals));
  PetscCall(PetscMalloc1(maxlocal,&ltog));
  /* Root i owns global index start+i; broadcast ownership to the leaves */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* Local positions with no leaf remain unmapped (-1) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
  /* The mapping takes ownership of ltog; globals was only a scratch root buffer */
  PetscCall(ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(0);
}
465 
466 /*@
467     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
468 
469     Not collective
470 
471     Input Parameters:
472 +   mapping - mapping data structure
473 -   bs - the blocksize
474 
475     Level: advanced
476 
477 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
478 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheckFalse(bs < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  /* o* = old block count/size/indices; nn = new number of blocks */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs;
  /* The total unblocked length on*obs must be divisible by the new block size */
  PetscCheckFalse((on*obs)%bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  PetscCall(PetscMalloc1(nn,&nid));
  /* GetIndices returns the fully-expanded (unblocked) index list */
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping,&oid));
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* cn counts negative (unmapped) entries in this new block; all other entries
       must be consecutive for the block to be representable with block size bs */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheckFalse(oid[i*bs+j] != oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* A block must be entirely unmapped (all bs entries negative) or entirely mapped */
      PetscCheckFalse(cn != bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      /* New block index = first unblocked global index of the block, divided by bs */
      nid[i] = oid[i*bs]/bs;
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping,&oid));

  mapping->n           = nn;
  mapping->bs          = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the single allocation backing entry 0; remaining
       entries were allocated individually (mirrors ISLocalToGlobalMappingDestroy()) */
    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i=1; i<mapping->info_nproc; i++) {
      PetscCall(PetscFree(mapping->info_indices[i]));
    }
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard the type-specific global->local data; it is rebuilt on demand */
  if (mapping->ops->destroy) {
    PetscCall((*mapping->ops->destroy)(mapping));
  }
  PetscFunctionReturn(0);
}
539 
540 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
543 
544     Not Collective
545 
546     Input Parameters:
547 .   mapping - mapping data structure
548 
549     Output Parameter:
550 .   bs - the blocksize
551 
552     Level: advanced
553 
554 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
555 @*/
556 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
557 {
558   PetscFunctionBegin;
559   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
560   *bs = mapping->bs;
561   PetscFunctionReturn(0);
562 }
563 
564 /*@
565     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
566     ordering and a global parallel ordering.
567 
568     Not Collective, but communicator may have more than one process
569 
570     Input Parameters:
571 +   comm - MPI communicator
572 .   bs - the block size
573 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
574 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
575 -   mode - see PetscCopyMode
576 
577     Output Parameter:
578 .   mapping - new mapping data structure
579 
580     Notes:
581     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
582 
583     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
584     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
585     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
586 
587     Level: advanced
588 
589 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
590           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
591 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no entries */
  if (n) PetscValidIntPointer(indices,4);
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  PetscCall(ISInitializePackage());

  PetscCall(PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView));
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  if (mode == PETSC_COPY_VALUES) {
    /* Private copy of the caller's array; freed at destroy time */
    PetscCall(PetscMalloc1(n,&in));
    PetscCall(PetscArraycpy(in,indices,n));
    (*mapping)->indices = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
  } else if (mode == PETSC_OWN_POINTER) {
    /* Take ownership of the caller's allocation; freed at destroy time */
    (*mapping)->indices = (PetscInt*)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
  } else if (mode == PETSC_USE_POINTER) {
    /* Borrow: the caller keeps ownership and must outlive the mapping */
    (*mapping)->indices = (PetscInt*)indices;
  }
  else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
  PetscFunctionReturn(0);
}
622 
623 PetscFunctionList ISLocalToGlobalMappingList = NULL;
624 
625 /*@
626    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
627 
628    Not collective
629 
630    Input Parameters:
631 .  mapping - mapping data structure
632 
633    Level: advanced
634 
635 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  /* PetscObjectOptionsBegin/PetscOptionsEnd are a macro pair; the explicit ierr
     form is required because the Begin macro opens a scope */
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);PetscCall(ierr);
  /* Offer the current type (if any) as the default shown to the user */
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg));
  if (flg) {
    PetscCall(ISLocalToGlobalMappingSetType(mapping,type));
  }
  ierr = PetscOptionsEnd();PetscCall(ierr);
  PetscFunctionReturn(0);
}
654 
655 /*@
656    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
657    ordering and a global parallel ordering.
658 
   Not Collective
660 
661    Input Parameters:
662 .  mapping - mapping data structure
663 
664    Level: advanced
665 
666 .seealso: ISLocalToGlobalMappingCreate()
667 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Other references remain: just drop ours */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* Indices are freed only if the mapping owns them (PETSC_COPY_VALUES / PETSC_OWN_POINTER) */
  if ((*mapping)->dealloc_indices) {
    PetscCall(PetscFree((*mapping)->indices));
  }
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* info_indices[0] backs one allocation; entries 1..info_nproc-1 were allocated individually */
    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i=1; i<(*mapping)->info_nproc; i++) {
      PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    }
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) {
    PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  }
  PetscCall(PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei));
  /* Type-specific teardown (Basic table / Hash map) */
  if ((*mapping)->ops->destroy) {
    PetscCall((*(*mapping)->ops->destroy)(*mapping));
  }
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(0);
}
699 
700 /*@
701     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
702     a new index set using the global numbering defined in an ISLocalToGlobalMapping
703     context.
704 
705     Collective on is
706 
707     Input Parameters:
708 +   mapping - mapping between local and global numbering
709 -   is - index set in local numbering
710 
711     Output Parameters:
712 .   newis - index set in global numbering
713 
714     Notes:
715     The output IS will have the same communicator of the input IS.
716 
717     Level: advanced
718 
719 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
720           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
721 @*/
722 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
723 {
724   PetscInt       n,*idxout;
725   const PetscInt *idxin;
726 
727   PetscFunctionBegin;
728   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
729   PetscValidHeaderSpecific(is,IS_CLASSID,2);
730   PetscValidPointer(newis,3);
731 
732   PetscCall(ISGetLocalSize(is,&n));
733   PetscCall(ISGetIndices(is,&idxin));
734   PetscCall(PetscMalloc1(n,&idxout));
735   PetscCall(ISLocalToGlobalMappingApply(mapping,n,idxin,idxout));
736   PetscCall(ISRestoreIndices(is,&idxin));
737   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis));
738   PetscFunctionReturn(0);
739 }
740 
741 /*@
742    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
743    and converts them to the global numbering.
744 
745    Not collective
746 
747    Input Parameters:
748 +  mapping - the local to global mapping context
749 .  N - number of integers
750 -  in - input indices in local numbering
751 
752    Output Parameter:
753 .  out - indices in global numbering
754 
755    Notes:
756    The in and out array parameters may be identical.
757 
758    Level: advanced
759 
760 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
761           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
762           AOPetscToApplication(), ISGlobalToLocalMappingApply()
763 
764 @*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs   = mapping->bs;
  /* Valid local indices lie in [0, bs*n) */
  Nmax = bs*mapping->n;
  if (bs == 1) {
    /* Fast path: scalar mapping, direct table lookup */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      /* Negative local indices pass through unchanged */
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    /* Blocked mapping: idx holds one global block index per local block */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
      /* Global index = global block * bs + offset within the block */
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}
796 
797 /*@
798    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
799 
800    Not collective
801 
802    Input Parameters:
803 +  mapping - the local to global mapping context
804 .  N - number of integers
805 -  in - input indices in local block numbering
806 
807    Output Parameter:
808 .  out - indices in global block numbering
809 
810    Notes:
811    The in and out array parameters may be identical.
812 
813    Example:
814      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
815      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
816 
817    Level: advanced
818 
819 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
820           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
821           AOPetscToApplication(), ISGlobalToLocalMappingApply()
822 
823 @*/
824 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
825 {
826   PetscInt       i,Nmax;
827   const PetscInt *idx;
828 
829   PetscFunctionBegin;
830   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
831   Nmax = mapping->n;
832   idx = mapping->indices;
833   for (i=0; i<N; i++) {
834     if (in[i] < 0) {
835       out[i] = in[i];
836       continue;
837     }
838     PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
839     out[i] = idx[in[i]];
840   }
841   PetscFunctionReturn(0);
842 }
843 
844 /*@
845     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
846     specified with a global numbering.
847 
848     Not collective
849 
850     Input Parameters:
851 +   mapping - mapping between local and global numbering
852 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
853            IS_GTOLM_DROP - drops the indices with no local value from the output list
854 .   n - number of global indices to map
855 -   idx - global indices to map
856 
857     Output Parameters:
858 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
859 -   idxout - local index of each global index, one must pass in an array long enough
860              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
861              idxout == NULL to determine the required length (returned in nout)
862              and then allocate the required space and call ISGlobalToLocalMappingApply()
863              a second time to set the values.
864 
865     Notes:
866     Either nout or idxout may be NULL. idx and idxout may be identical.
867 
868     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
869     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
870     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
871 
872     Level: advanced
873 
874     Developer Note: The manual page states that idx and idxout may be identical but the calling
875        sequence declares idx as const so it cannot be the same as idxout.
876 
877 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
878           ISLocalToGlobalMappingDestroy()
879 @*/
880 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
881 {
882   PetscFunctionBegin;
883   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
884   if (!mapping->data) {
885     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
886   }
887   PetscCall((*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout));
888   PetscFunctionReturn(0);
889 }
890 
891 /*@
892     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
893     a new index set using the local numbering defined in an ISLocalToGlobalMapping
894     context.
895 
896     Not collective
897 
898     Input Parameters:
899 +   mapping - mapping between local and global numbering
900 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
901            IS_GTOLM_DROP - drops the indices with no local value from the output list
902 -   is - index set in global numbering
903 
904     Output Parameters:
905 .   newis - index set in local numbering
906 
907     Notes:
908     The output IS will be sequential, as it encodes a purely local operation
909 
910     Level: advanced
911 
912 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
913           ISLocalToGlobalMappingDestroy()
914 @*/
915 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
916 {
917   PetscInt       n,nout,*idxout;
918   const PetscInt *idxin;
919 
920   PetscFunctionBegin;
921   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
922   PetscValidHeaderSpecific(is,IS_CLASSID,3);
923   PetscValidPointer(newis,4);
924 
925   PetscCall(ISGetLocalSize(is,&n));
926   PetscCall(ISGetIndices(is,&idxin));
927   if (type == IS_GTOLM_MASK) {
928     PetscCall(PetscMalloc1(n,&idxout));
929   } else {
930     PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL));
931     PetscCall(PetscMalloc1(nout,&idxout));
932   }
933   PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout));
934   PetscCall(ISRestoreIndices(is,&idxin));
935   PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis));
936   PetscFunctionReturn(0);
937 }
938 
939 /*@
940     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
941     specified with a block global numbering.
942 
943     Not collective
944 
945     Input Parameters:
946 +   mapping - mapping between local and global numbering
947 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
948            IS_GTOLM_DROP - drops the indices with no local value from the output list
949 .   n - number of global indices to map
950 -   idx - global indices to map
951 
952     Output Parameters:
953 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
954 -   idxout - local index of each global index, one must pass in an array long enough
955              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
956              idxout == NULL to determine the required length (returned in nout)
957              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
958              a second time to set the values.
959 
960     Notes:
961     Either nout or idxout may be NULL. idx and idxout may be identical.
962 
963     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
964     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
965     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
966 
967     Level: advanced
968 
969     Developer Note: The manual page states that idx and idxout may be identical but the calling
970        sequence declares idx as const so it cannot be the same as idxout.
971 
972 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
973           ISLocalToGlobalMappingDestroy()
974 @*/
975 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
976                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
977 {
978   PetscFunctionBegin;
979   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
980   if (!mapping->data) {
981     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
982   }
983   PetscCall((*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout));
984   PetscFunctionReturn(0);
985 }
986 
987 /*@C
988     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
989      each index shared by more than one processor
990 
991     Collective on ISLocalToGlobalMapping
992 
993     Input Parameter:
994 .   mapping - the mapping from local to global indexing
995 
996     Output Parameters:
997 +   nproc - number of processors that are connected to this one
998 .   proc - neighboring processors
999 .   numproc - number of indices for each subdomain (processor)
1000 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1001 
1002     Level: advanced
1003 
1004     Fortran Usage:
1005 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1006 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1007           PetscInt indices[nproc][numprocmax],ierr)
1008         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1009         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1010 
1011 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1012           ISLocalToGlobalMappingRestoreInfo()
1013 @*/
1014 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1015 {
1016   PetscFunctionBegin;
1017   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1018   if (mapping->info_cached) {
1019     *nproc    = mapping->info_nproc;
1020     *procs    = mapping->info_procs;
1021     *numprocs = mapping->info_numprocs;
1022     *indices  = mapping->info_indices;
1023   } else {
1024     PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices));
1025   }
1026   PetscFunctionReturn(0);
1027 }
1028 
/*
   Computes, with a two-phase rendezvous over the mapping's communicator, the set of
   neighboring ranks and, for each neighbor, the local (block) indices shared with it.
   Results are cached on the mapping for reuse by ISLocalToGlobalMappingGetBlockInfo().
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  if (size == 1) {
    /* uniprocessor: no neighbors; allocate length-one arrays so the restore/free
       logic has consistent pointers to release */
    *nproc         = 0;
    *procs         = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag3));

  /* Ng = 1 + max global index referenced on any rank: an upper bound on the global node count */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* partition [0,Ng) into contiguous ownership ranges of length scale; the last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2*size,&nprocs));
  PetscCall(PetscArrayzero(nprocs,2*size));

  /* determine owners of each local node  */
  PetscCall(PetscMalloc1(n,&owner));
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscCall(PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm,nprocs,&nmax,&nrecvs));
  PetscCall(PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs));
  PetscCall(PetscMalloc1(nrecvs+1,&recv_waits));
  for (i=0; i<nrecvs; i++) {
    PetscCallMPI(MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i));
  }

  /* pack messages containing lists of local nodes to owners */
  /* each message to rank p is a list of (global index, local position) pairs for indices p owns */
  PetscCall(PetscMalloc1(2*n+1,&sends));
  PetscCall(PetscMalloc1(size+1,&starts));
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* packing above advanced starts[]; rebuild the offsets for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends+1,&send_waits));
  PetscCall(PetscMalloc1(nsends+1,&dest));
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      PetscCallMPI(MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs+1,&source));
  PetscCall(PetscMalloc1(nrecvs+1,&len));
  cnt  = nrecvs;
  PetscCall(PetscCalloc1(ng+1,&nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* number of (index,position) pairs in this message */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm+1,&ownedsenders));
  PetscCall(PetscMalloc1(ng+1,&starts));
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* rebuild starts[] (the fill loop above advanced it) and dump owner lists */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart));
        for (j=0; j<nownedsenders[i]; j++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]));
        }
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends,&send_status));
    PetscCallMPI(MPI_Waitall(nsends,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2+1,&nprocs)); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt+1,&sends2));
  PetscCall(PetscMalloc1(nsends2+1,&starts2));

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2+1,&lens2));
  PetscCall(PetscMalloc1(nrecvs2+1,&starts3));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i));
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm));
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  PetscCall(PetscMalloc1(nt+1,&recvs2));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i));
  }

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2+1,&send_waits));
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i));
  }

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]));
        for (k=0; k<recvs2[cnt+1]; k++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]));
        }
        cnt += 2 + recvs2[cnt+1];
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  PetscCall(PetscCalloc1(size,&nprocs));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  PetscCall(PetscMalloc1(nt+1,procs));
  PetscCall(PetscMalloc1(nt+1,numprocs));
  PetscCall(PetscMalloc1(nt+1,indices));
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  PetscCall(PetscMalloc1(size,&bprocs)); /* bprocs[rank] = compressed position of rank in (*procs) */
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i],&(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs,nt)); /* reused below as fill counters */
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i],&tmp));
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]));
      for (j=0; j<(*numprocs)[i]; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]));
      }
      PetscCall(PetscSynchronizedPrintf(comm,"\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2,&send_status));
    PetscCallMPI(MPI_Waitall(nsends2,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1435 
1436 /*@C
1437     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1438 
1439     Collective on ISLocalToGlobalMapping
1440 
1441     Input Parameter:
1442 .   mapping - the mapping from local to global indexing
1443 
1444     Output Parameters:
1445 +   nproc - number of processors that are connected to this one
1446 .   proc - neighboring processors
1447 .   numproc - number of indices for each processor
1448 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1449 
1450     Level: advanced
1451 
1452 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1453           ISLocalToGlobalMappingGetInfo()
1454 @*/
1455 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1456 {
1457   PetscFunctionBegin;
1458   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1459   if (mapping->info_free) {
1460     PetscCall(PetscFree(*numprocs));
1461     if (*indices) {
1462       PetscInt i;
1463 
1464       PetscCall(PetscFree((*indices)[0]));
1465       for (i=1; i<*nproc; i++) {
1466         PetscCall(PetscFree((*indices)[i]));
1467       }
1468       PetscCall(PetscFree(*indices));
1469     }
1470   }
1471   *nproc    = 0;
1472   *procs    = NULL;
1473   *numprocs = NULL;
1474   *indices  = NULL;
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 /*@C
1479     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1480      each index shared by more than one processor
1481 
1482     Collective on ISLocalToGlobalMapping
1483 
1484     Input Parameter:
1485 .   mapping - the mapping from local to global indexing
1486 
1487     Output Parameters:
1488 +   nproc - number of processors that are connected to this one
1489 .   proc - neighboring processors
1490 .   numproc - number of indices for each subdomain (processor)
1491 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1492 
1493     Level: advanced
1494 
1495     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1496 
1497     Fortran Usage:
1498 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1499 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1500           PetscInt indices[nproc][numprocmax],ierr)
1501         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1502         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1503 
1504 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1505           ISLocalToGlobalMappingRestoreInfo()
1506 @*/
1507 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1508 {
1509   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs,i,j,k;
1510 
1511   PetscFunctionBegin;
1512   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1513   bs = mapping->bs;
1514   PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices));
1515   if (bs > 1) { /* we need to expand the cached info */
1516     PetscCall(PetscCalloc1(*nproc,&*indices));
1517     PetscCall(PetscCalloc1(*nproc,&*numprocs));
1518     for (i=0; i<*nproc; i++) {
1519       PetscCall(PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]));
1520       for (j=0; j<bnumprocs[i]; j++) {
1521         for (k=0; k<bs; k++) {
1522           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1523         }
1524       }
1525       (*numprocs)[i] = bnumprocs[i]*bs;
1526     }
1527     mapping->info_free = PETSC_TRUE;
1528   } else {
1529     *numprocs = bnumprocs;
1530     *indices  = bindices;
1531   }
1532   PetscFunctionReturn(0);
1533 }
1534 
1535 /*@C
1536     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1537 
1538     Collective on ISLocalToGlobalMapping
1539 
1540     Input Parameter:
1541 .   mapping - the mapping from local to global indexing
1542 
1543     Output Parameters:
1544 +   nproc - number of processors that are connected to this one
1545 .   proc - neighboring processors
1546 .   numproc - number of indices for each processor
1547 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1548 
1549     Level: advanced
1550 
1551 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1552           ISLocalToGlobalMappingGetInfo()
1553 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscFunctionBegin;
  /* point and block info share the same release path: free only the bs>1
     expanded copies, then zero the caller's pointers */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices));
  PetscFunctionReturn(0);
}
1560 
1561 /*@C
1562     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1563 
1564     Collective on ISLocalToGlobalMapping
1565 
1566     Input Parameter:
1567 .   mapping - the mapping from local to global indexing
1568 
1569     Output Parameters:
1570 +   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1571 .   count - number of neighboring processors per node
1572 -   indices - indices of processes sharing the node (sorted)
1573 
1574     Level: advanced
1575 
1576     Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1577 
1578 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1579           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1580 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping,&n));
  if (!mapping->info_nodec) {
    /* build a CSR-like per-node table (info_nodec = counts, info_nodei = rank lists)
       from the per-neighbor lists returned by ISLocalToGlobalMappingGetInfo() */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    PetscCall(PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
    /* first pass: count ranks per node; every node gets at least one slot (for this rank) */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    /* neighbor 0 is skipped here; GetBlockInfo places this rank first in the
       neighbor list -- NOTE(review): assumes neigh[0] is always the own rank; confirm */
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all rank lists; info_nodei[i] points into it (CSR layout) */
    if (n) PetscCall(PetscMalloc1(m,&mapping->info_nodei[0]));
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset counters and fill the lists */
    PetscCall(PetscArrayzero(mapping->info_nodec,n));
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and remove duplicates (adjusts info_nodec) */
    for (i=0;i<n;i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1624 
1625 /*@C
1626     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1627 
1628     Collective on ISLocalToGlobalMapping
1629 
1630     Input Parameter:
1631 .   mapping - the mapping from local to global indexing
1632 
1633     Output Parameters:
1634 +   nnodes - number of local nodes
1635 .   count - number of neighboring processors per node
1636 -   indices - indices of processes sharing the node (sorted)
1637 
1638     Level: advanced
1639 
1640 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1641           ISLocalToGlobalMappingGetInfo()
1642 @*/
1643 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1644 {
1645   PetscFunctionBegin;
1646   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1647   if (nnodes)  *nnodes  = 0;
1648   if (count)   *count   = NULL;
1649   if (indices) *indices = NULL;
1650   PetscFunctionReturn(0);
1651 }
1652 
1653 /*@C
1654    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1655 
1656    Not Collective
1657 
1658    Input Parameter:
1659 . ltog - local to global mapping
1660 
1661    Output Parameter:
1662 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1663 
1664    Level: advanced
1665 
1666    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1668 
1669 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1670 @*/
1671 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1672 {
1673   PetscFunctionBegin;
1674   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1675   PetscValidPointer(array,2);
1676   if (ltog->bs == 1) {
1677     *array = ltog->indices;
1678   } else {
1679     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1680     const PetscInt *ii;
1681 
1682     PetscCall(PetscMalloc1(bs*n,&jj));
1683     *array = jj;
1684     k    = 0;
1685     ii   = ltog->indices;
1686     for (i=0; i<n; i++)
1687       for (j=0; j<bs; j++)
1688         jj[k++] = bs*ii[i] + j;
1689   }
1690   PetscFunctionReturn(0);
1691 }
1692 
1693 /*@C
1694    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1695 
1696    Not Collective
1697 
1698    Input Parameters:
1699 + ltog - local to global mapping
1700 - array - array of indices
1701 
1702    Level: advanced
1703 
1704 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1705 @*/
1706 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1707 {
1708   PetscFunctionBegin;
1709   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1710   PetscValidPointer(array,2);
1711   PetscCheckFalse(ltog->bs == 1 && *array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1712 
1713   if (ltog->bs > 1) {
1714     PetscCall(PetscFree(*(void**)array));
1715   }
1716   PetscFunctionReturn(0);
1717 }
1718 
1719 /*@C
1720    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1721 
1722    Not Collective
1723 
1724    Input Parameter:
1725 . ltog - local to global mapping
1726 
1727    Output Parameter:
1728 . array - array of indices
1729 
1730    Level: advanced
1731 
1732 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1733 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out the internal block-index array directly; no copy is made, so the
     caller must return the same pointer via ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1742 
1743 /*@C
1744    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1745 
1746    Not Collective
1747 
1748    Input Parameters:
1749 + ltog - local to global mapping
1750 - array - array of indices
1751 
1752    Level: advanced
1753 
1754 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1755 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* GetBlockIndices() returned the internal array, so the same pointer must come back;
     nothing is freed here, only the caller's handle is cleared */
  PetscCheckFalse(*array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}
1765 
1766 /*@C
1767    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1768 
1769    Not Collective
1770 
1771    Input Parameters:
1772 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1773 . n - number of mappings to concatenate
1774 - ltogs - local to global mappings
1775 
1776    Output Parameter:
1777 . ltogcat - new mapping
1778 
1779    Note: this currently always returns a mapping with block size of 1
1780 
   Developer Note: If all the input mappings have the same block size we could easily handle that as a special case
1782 
1783    Level: advanced
1784 
1785 .seealso: ISLocalToGlobalMappingCreate()
1786 @*/
1787 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1788 {
1789   PetscInt       i,cnt,m,*idx;
1790 
1791   PetscFunctionBegin;
1792   PetscCheckFalse(n < 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1793   if (n > 0) PetscValidPointer(ltogs,3);
1794   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1795   PetscValidPointer(ltogcat,4);
1796   for (cnt=0,i=0; i<n; i++) {
1797     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1798     cnt += m;
1799   }
1800   PetscCall(PetscMalloc1(cnt,&idx));
1801   for (cnt=0,i=0; i<n; i++) {
1802     const PetscInt *subidx;
1803     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1804     PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx));
1805     PetscCall(PetscArraycpy(&idx[cnt],subidx,m));
1806     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx));
1807     cnt += m;
1808   }
1809   PetscCall(ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat));
1810   PetscFunctionReturn(0);
1811 }
1812 
1813 /*MC
1814       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1815                                     used this is good for only small and moderate size problems.
1816 
1817    Options Database Keys:
1818 .   -islocaltoglobalmapping_type basic - select this method
1819 
1820    Level: beginner
1821 
1822 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1823 M*/
/* Install the function table for the BASIC global-to-local implementation */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}
1833 
1834 /*MC
1835       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1836                                     used this is good for large memory problems.
1837 
1838    Options Database Keys:
1839 .   -islocaltoglobalmapping_type hash - select this method
1840 
1841    Notes:
1842     This is selected automatically for large problems if the user does not set the type.
1843 
1844    Level: beginner
1845 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1847 M*/
/* Install the function table for the HASH global-to-local implementation */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}
1857 
1858 /*@C
1859     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1860 
1861    Not Collective
1862 
1863    Input Parameters:
1864 +  sname - name of a new method
1865 -  routine_create - routine to create method context
1866 
1867    Notes:
1868    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1869 
1870    Sample usage:
1871 .vb
1872    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1873 .ve
1874 
1875    Then, your mapping can be chosen with the procedural interface via
1876 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1877    or at runtime via the option
1878 $     -islocaltoglobalmapping_type my_mapper
1879 
1880    Level: advanced
1881 
1882 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1883 
1884 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscFunctionBegin;
  /* ensure the IS package (and its registry) exists before adding to the function list */
  PetscCall(ISInitializePackage());
  PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function));
  PetscFunctionReturn(0);
}
1892 
1893 /*@C
1894    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1895 
1896    Logically Collective on ISLocalToGlobalMapping
1897 
1898    Input Parameters:
1899 +  ltog - the ISLocalToGlobalMapping object
1900 -  type - a known method
1901 
1902    Options Database Key:
1903 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1904     of available methods (for instance, basic or hash)
1905 
1906    Notes:
1907    See "petsc/include/petscis.h" for available methods
1908 
1909   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1910   then set the ISLocalToGlobalMapping type from the options database rather than by using
1911   this routine.
1912 
1913   Level: intermediate
1914 
1915   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1916   are accessed by ISLocalToGlobalMappingSetType().
1917 
1918 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingGetType()
1919 @*/
PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
{
  PetscBool      match;
  PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  if (type) PetscValidCharPointer(type,2);

  /* no-op if the mapping already has the requested type */
  PetscCall(PetscObjectTypeCompare((PetscObject)ltog,type,&match));
  if (match) PetscFunctionReturn(0);

  /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
  if (type) {
    PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r));
    PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
  }
  /* Destroy the previous private LTOG context */
  if (ltog->ops->destroy) {
    PetscCall((*ltog->ops->destroy)(ltog));
    ltog->ops->destroy = NULL;
  }
  /* record the new type name, then let the constructor fill in the ops table */
  PetscCall(PetscObjectChangeTypeName((PetscObject)ltog,type));
  if (r) PetscCall((*r)(ltog));
  PetscFunctionReturn(0);
}
1946 
1947 /*@C
1948    ISLocalToGlobalMappingGetType - Get the type of the l2g map
1949 
1950    Not Collective
1951 
1952    Input Parameter:
1953 .  ltog - the ISLocalToGlobalMapping object
1954 
1955    Output Parameter:
1956 .  type - the type
1957 
1958    Level: intermediate
1959 
1960 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType()
1961 @*/
PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(type,2);
  /* may be NULL if the type has not been set yet (type setup can be deferred) */
  *type = ((PetscObject)ltog)->type_name;
  PetscFunctionReturn(0);
}
1970 
/* guards ISLocalToGlobalMappingRegisterAll() so the built-in types are registered only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1972 
1973 /*@C
1974   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1975 
1976   Not Collective
1977 
1978   Level: advanced
1979 
.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1981 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscFunctionBegin;
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  /* set the flag before registering -- presumably to stop re-entry through
     ISLocalToGlobalMappingRegister()/ISInitializePackage(); TODO confirm */
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
  PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
  PetscFunctionReturn(0);
}
1991