xref: /petsc/src/vec/is/utils/isltog.c (revision 26cc229b24afffee8e9a203ddc8754b40d660fc7)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Data for the BASIC global-to-local implementation: a dense lookup table
   covering [globalstart, globalend]; globals[g - globalstart] holds the local
   index of global (block) index g, or -1 when g is not in the mapping.
   Fast but memory scales with the global index range. */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Data for the HASH global-to-local implementation: a hash map from global
   (block) index to local index; memory scales with n, so it remains usable
   when the global range is huge. */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: `ISRestorePointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47 
48   PetscFunctionBeginHot;
49   *pStart = 0;
50   *points = NULL;
51   PetscCall(ISGetLocalSize(pointIS, &numCells));
52   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
53   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
54   *pEnd   = *pStart + numCells;
55   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
56   PetscFunctionReturn(0);
57 }
58 
59 /*@C
60   ISRestorePointRange - Destroys the traversal description
61 
62   Not collective
63 
64   Input Parameters:
65 + pointIS - The IS object
66 . pStart  - The first index, from ISGetPointRange()
67 . pEnd    - One past the last index, from ISGetPointRange()
68 - points  - The indices, from ISGetPointRange()
69 
70   Notes:
71   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
72 $ ISGetPointRange(is, &pStart, &pEnd, &points);
73 $ for (p = pStart; p < pEnd; ++p) {
74 $   const PetscInt point = points ? points[p] : p;
75 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
77 
78   Level: intermediate
79 
80 .seealso: `ISGetPointRange()`, `ISGetPointSubrange()`, `ISGetIndices()`, `ISCreateStride()`
81 @*/
82 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
83 {
84   PetscInt       step = 1;
85   PetscBool      isStride;
86 
87   PetscFunctionBeginHot;
88   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
89   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
90   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
91   PetscFunctionReturn(0);
92 }
93 
94 /*@C
95   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
96 
97   Not collective
98 
99   Input Parameters:
100 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
102 . pEnd       - One past the last index for the subrange
103 - points     - The indices for the entire range, from ISGetPointRange()
104 
105   Output Parameters:
106 . subpointIS - The IS object now configured to be a subrange
107 
108   Notes:
109   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
110 
111   Level: intermediate
112 
113 .seealso: `ISGetPointRange()`, `ISRestorePointRange()`, `ISGetIndices()`, `ISCreateStride()`
114 @*/
115 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
116 {
117   PetscFunctionBeginHot;
118   if (points) {
119     PetscCall(ISSetType(subpointIS, ISGENERAL));
120     PetscCall(ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER));
121   } else {
122     PetscCall(ISSetType(subpointIS, ISSTRIDE));
123     PetscCall(ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1));
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /* -----------------------------------------------------------------------------------------*/
129 
130 /*
131     Creates the global mapping information in the ISLocalToGlobalMapping structure
132 
133     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
134 */
/* Computes the global index range [globalstart, globalend] of the mapping and,
   if the user has not chosen an implementation, selects BASIC for "small"
   ranges and HASH when the range is much larger than n (dense table would be
   too big), then invokes the implementation-specific setup. */
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0); /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* min/max over the nonnegative indices; negative entries are ignored */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no valid indices at all: use the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;
  if (!((PetscObject)mapping)->type_name) {
    /* heuristic: a dense table of size end-start is acceptable only while it
       stays within a constant factor of n (or an absolute 1e6 floor) */
    if ((end - start) > PetscMax(4*n,1000000)) {
      PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH));
    } else {
      PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC));
    }
  }
  if (mapping->ops->globaltolocalmappingsetup) PetscCall((*mapping->ops->globaltolocalmappingsetup)(mapping));
  PetscFunctionReturn(0);
}
162 
163 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
164 {
165   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
166   ISLocalToGlobalMapping_Basic *map;
167 
168   PetscFunctionBegin;
169   start            = mapping->globalstart;
170   end              = mapping->globalend;
171   PetscCall(PetscNew(&map));
172   PetscCall(PetscMalloc1(end-start+2,&globals));
173   map->globals     = globals;
174   for (i=0; i<end-start+1; i++) globals[i] = -1;
175   for (i=0; i<n; i++) {
176     if (idx[i] < 0) continue;
177     globals[idx[i] - start] = i;
178   }
179   mapping->data = (void*)map;
180   PetscCall(PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt)));
181   PetscFunctionReturn(0);
182 }
183 
184 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
185 {
186   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
187   ISLocalToGlobalMapping_Hash *map;
188 
189   PetscFunctionBegin;
190   PetscCall(PetscNew(&map));
191   PetscCall(PetscHMapICreate(&map->globalht));
192   for (i=0; i<n; i++) {
193     if (idx[i] < 0) continue;
194     PetscCall(PetscHMapISet(map->globalht,idx[i],i));
195   }
196   mapping->data = (void*)map;
197   PetscCall(PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt)));
198   PetscFunctionReturn(0);
199 }
200 
201 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
202 {
203   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
204 
205   PetscFunctionBegin;
206   if (!map) PetscFunctionReturn(0);
207   PetscCall(PetscFree(map->globals));
208   PetscCall(PetscFree(mapping->data));
209   PetscFunctionReturn(0);
210 }
211 
212 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
213 {
214   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
215 
216   PetscFunctionBegin;
217   if (!map) PetscFunctionReturn(0);
218   PetscCall(PetscHMapIDestroy(&map->globalht));
219   PetscCall(PetscFree(mapping->data));
220   PetscFunctionReturn(0);
221 }
222 
/*
  Instantiate the four ISGlobalToLocalMappingApply variants from the template
  in isltog.h.  Each inclusion of the header consumes and #undefs:
    GTOLTYPE - suffix of the implementation data struct (_Basic or _Hash)
    GTOLNAME - suffix of the generated function name
    GTOLBS   - block size the generated code uses (mapping->bs, or 1 for the
               Block variants which work on block indices directly)
    GTOL     - the core global->local lookup macro
*/

/* BASIC, point indices: dense-table lookup scaled by the block size */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* BASIC, block indices: dense-table lookup, no block-size scaling */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* HASH, point indices: hash lookup scaled by the block size */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* HASH, block indices: hash lookup, no block-size scaling */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
257 
258 /*@
259     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
260 
261     Not Collective
262 
263     Input Parameter:
264 .   ltog - local to global mapping
265 
266     Output Parameter:
267 .   nltog - the duplicated local to global mapping
268 
269     Level: advanced
270 
271 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
272 @*/
273 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
274 {
275   ISLocalToGlobalMappingType l2gtype;
276 
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
279   PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog));
280   PetscCall(ISLocalToGlobalMappingGetType(ltog,&l2gtype));
281   PetscCall(ISLocalToGlobalMappingSetType(*nltog,l2gtype));
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
287 
288     Not Collective
289 
290     Input Parameter:
291 .   ltog - local to global mapping
292 
293     Output Parameter:
294 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
295 
296     Level: advanced
297 
298 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
299 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks; scale by the block size to get point entries */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
308 
309 /*@C
310    ISLocalToGlobalMappingViewFromOptions - View from Options
311 
312    Collective on ISLocalToGlobalMapping
313 
314    Input Parameters:
315 +  A - the local to global mapping object
316 .  obj - Optional object
317 -  name - command line option
318 
319    Level: intermediate
320 .seealso: `ISLocalToGlobalMapping`, `ISLocalToGlobalMappingView`, `PetscObjectViewFromOptions()`, `ISLocalToGlobalMappingCreate()`
321 @*/
/* Thin wrapper forwarding to PetscObjectViewFromOptions() with the given
   option name; only validates that A is a live ISLocalToGlobalMapping. */
PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
  PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
  PetscFunctionReturn(0);
}
329 
330 /*@C
331     ISLocalToGlobalMappingView - View a local to global mapping
332 
333     Not Collective
334 
335     Input Parameters:
336 +   ltog - local to global mapping
337 -   viewer - viewer
338 
339     Level: advanced
340 
341 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`
342 @*/
343 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
344 {
345   PetscInt       i;
346   PetscMPIInt    rank;
347   PetscBool      iascii;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
351   if (!viewer) {
352     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer));
353   }
354   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
355 
356   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank));
357   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
358   if (iascii) {
359     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer));
360     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
361     for (i=0; i<mapping->n; i++) {
362       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]));
363     }
364     PetscCall(PetscViewerFlush(viewer));
365     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
366   }
367   PetscFunctionReturn(0);
368 }
369 
370 /*@
371     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
372     ordering and a global parallel ordering.
373 
374     Not collective
375 
376     Input Parameter:
377 .   is - index set containing the global numbers for each local number
378 
379     Output Parameter:
380 .   mapping - new mapping data structure
381 
382     Notes:
383     the block size of the IS determines the block size of the mapping
384     Level: advanced
385 
386 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetFromOptions()`
387 @*/
388 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
389 {
390   PetscInt       n,bs;
391   const PetscInt *indices;
392   MPI_Comm       comm;
393   PetscBool      isblock;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(is,IS_CLASSID,1);
397   PetscValidPointer(mapping,2);
398 
399   PetscCall(PetscObjectGetComm((PetscObject)is,&comm));
400   PetscCall(ISGetLocalSize(is,&n));
401   PetscCall(PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock));
402   if (!isblock) {
403     PetscCall(ISGetIndices(is,&indices));
404     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping));
405     PetscCall(ISRestoreIndices(is,&indices));
406   } else {
407     PetscCall(ISGetBlockSize(is,&bs));
408     PetscCall(ISBlockGetIndices(is,&indices));
409     PetscCall(ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping));
410     PetscCall(ISBlockRestoreIndices(is,&indices));
411   }
412   PetscFunctionReturn(0);
413 }
414 
415 /*@C
416     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
417     ordering and a global parallel ordering.
418 
419     Collective
420 
421     Input Parameters:
422 +   sf - star forest mapping contiguous local indices to (rank, offset)
423 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
424 
425     Output Parameter:
426 .   mapping - new mapping data structure
427 
428     Level: advanced
429 
430     Notes:
431     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
432 
433 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`
434 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);
  PetscCall(PetscObjectGetComm((PetscObject)sf,&comm));
  PetscCall(PetscSFGetGraph(sf,&nroots,&nleaves,NULL,NULL));
  if (start == PETSC_DECIDE) {
    /* contiguous numbering: start = sum of nroots over lower ranks;
       MPI_Exscan leaves start untouched on rank 0, hence the preset 0 */
    start = 0;
    PetscCallMPI(MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm));
  } else PetscCheck(start >= 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
  ++maxlocal; /* turn the largest leaf index into a count */
  PetscCall(PetscMalloc1(nroots,&globals));
  PetscCall(PetscMalloc1(maxlocal,&ltog));
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* local slots not covered by an SF leaf stay unmapped (-1) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  /* broadcast each root's global number to all of its leaves */
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
  /* the mapping takes ownership of ltog; globals is a scratch buffer */
  PetscCall(ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping));
  PetscCall(PetscFree(globals));
  PetscFunctionReturn(0);
}
461 
462 /*@
463     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
464 
465     Not collective
466 
467     Input Parameters:
468 +   mapping - mapping data structure
469 -   bs - the blocksize
470 
471     Level: advanced
472 
473 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
474 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;  /* new block indices, one per new block */
  const PetscInt *oid;  /* expanded (point) indices of the current mapping */
  PetscInt       i,cn,on,obs,nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheck(bs >= 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs; /* new number of blocks; total point count must divide evenly */
  PetscCheck((on*obs)%bs == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  PetscCall(PetscMalloc1(nn,&nid));
  /* GetIndices returns the point-expanded index array (bs*n entries) */
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping,&oid));
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* cn counts negative entries; consecutive pairs are checked up to j=bs-2,
       and the loop leaves j == bs-1 so the last entry is inspected below */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      /* entries within a block must be consecutive global indices */
      PetscCheck(oid[i*bs+j] == oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* a block must be entirely negative (unmapped) or entirely valid */
      PetscCheck(cn == bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      /* first point index of the block, converted to a block index */
      nid[i] = oid[i*bs]/bs;
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping,&oid));

  mapping->n           = nn;
  mapping->bs          = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i=1; i<mapping->info_nproc; i++) {
      PetscCall(PetscFree(mapping->info_indices[i]));
    }
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* drop the implementation's global-to-local data; it is rebuilt on demand */
  if (mapping->ops->destroy) {
    PetscCall((*mapping->ops->destroy)(mapping));
  }
  PetscFunctionReturn(0);
}
535 
536 /*@
537     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
538     ordering and a global parallel ordering.
539 
540     Not Collective
541 
542     Input Parameters:
543 .   mapping - mapping data structure
544 
545     Output Parameter:
546 .   bs - the blocksize
547 
548     Level: advanced
549 
550 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`
551 @*/
552 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
553 {
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
556   *bs = mapping->bs;
557   PetscFunctionReturn(0);
558 }
559 
560 /*@
561     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
562     ordering and a global parallel ordering.
563 
564     Not Collective, but communicator may have more than one process
565 
566     Input Parameters:
567 +   comm - MPI communicator
568 .   bs - the block size
569 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
570 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
571 -   mode - see PetscCopyMode
572 
573     Output Parameter:
574 .   mapping - new mapping data structure
575 
576     Notes:
577     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
578 
579     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
580     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
581     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
582 
583     Level: advanced
584 
585 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingSetFromOptions()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
586           `ISLocalToGlobalMappingSetType()`, `ISLocalToGlobalMappingType`
587 @*/
588 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
589 {
590   PetscInt       *in;
591 
592   PetscFunctionBegin;
593   if (n) PetscValidIntPointer(indices,4);
594   PetscValidPointer(mapping,6);
595 
596   *mapping = NULL;
597   PetscCall(ISInitializePackage());
598 
599   PetscCall(PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView));
600   (*mapping)->n  = n;
601   (*mapping)->bs = bs;
602   if (mode == PETSC_COPY_VALUES) {
603     PetscCall(PetscMalloc1(n,&in));
604     PetscCall(PetscArraycpy(in,indices,n));
605     (*mapping)->indices = in;
606     (*mapping)->dealloc_indices = PETSC_TRUE;
607     PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
608   } else if (mode == PETSC_OWN_POINTER) {
609     (*mapping)->indices = (PetscInt*)indices;
610     (*mapping)->dealloc_indices = PETSC_TRUE;
611     PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
612   } else if (mode == PETSC_USE_POINTER) {
613     (*mapping)->indices = (PetscInt*)indices;
614   }
615   else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
616   PetscFunctionReturn(0);
617 }
618 
619 PetscFunctionList ISLocalToGlobalMappingList = NULL;
620 
621 /*@
622    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
623 
624    Not collective
625 
626    Input Parameters:
627 .  mapping - mapping data structure
628 
629    Level: advanced
630 
631 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set"; /* placeholder shown when no type chosen yet */
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  /* PetscObjectOptionsBegin/PetscOptionsEnd bracket option queries with the
     object's prefix and options database */
  PetscObjectOptionsBegin((PetscObject)mapping);
  /* current type (if any) serves as the displayed default in the list */
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg));
  if (flg) {
    PetscCall(ISLocalToGlobalMappingSetType(mapping,type));
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
649 
650 /*@
651    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
652    ordering and a global parallel ordering.
653 
   Not Collective
655 
656    Input Parameters:
657 .  mapping - mapping data structure
658 
659    Level: advanced
660 
661 .seealso: `ISLocalToGlobalMappingCreate()`
662 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* other references remain: just drop the caller's handle */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* indices are freed only if created with PETSC_COPY_VALUES/PETSC_OWN_POINTER */
  if ((*mapping)->dealloc_indices) {
    PetscCall(PetscFree((*mapping)->indices));
  }
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed first, then the remaining per-process arrays
       individually — presumably they are allocated separately; see the
       matching allocation in the GetInfo code (not in this view) */
    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i=1; i<(*mapping)->info_nproc; i++) {
      PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    }
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  if ((*mapping)->info_nodei) {
    PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  }
  PetscCall(PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei));
  /* implementation-specific teardown (Basic table / Hash map) */
  if ((*mapping)->ops->destroy) {
    PetscCall((*(*mapping)->ops->destroy)(*mapping));
  }
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(0);
}
694 
695 /*@
696     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
697     a new index set using the global numbering defined in an ISLocalToGlobalMapping
698     context.
699 
700     Collective on is
701 
702     Input Parameters:
703 +   mapping - mapping between local and global numbering
704 -   is - index set in local numbering
705 
706     Output Parameters:
707 .   newis - index set in global numbering
708 
709     Notes:
710     The output IS will have the same communicator of the input IS.
711 
712     Level: advanced
713 
714 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
715           `ISLocalToGlobalMappingDestroy()`, `ISGlobalToLocalMappingApply()`
716 @*/
717 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
718 {
719   PetscInt       n,*idxout;
720   const PetscInt *idxin;
721 
722   PetscFunctionBegin;
723   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
724   PetscValidHeaderSpecific(is,IS_CLASSID,2);
725   PetscValidPointer(newis,3);
726 
727   PetscCall(ISGetLocalSize(is,&n));
728   PetscCall(ISGetIndices(is,&idxin));
729   PetscCall(PetscMalloc1(n,&idxout));
730   PetscCall(ISLocalToGlobalMappingApply(mapping,n,idxin,idxout));
731   PetscCall(ISRestoreIndices(is,&idxin));
732   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis));
733   PetscFunctionReturn(0);
734 }
735 
736 /*@
737    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
738    and converts them to the global numbering.
739 
740    Not collective
741 
742    Input Parameters:
743 +  mapping - the local to global mapping context
744 .  N - number of integers
745 -  in - input indices in local numbering
746 
747    Output Parameter:
748 .  out - indices in global numbering
749 
750    Notes:
751    The in and out array parameters may be identical.
752 
753    Level: advanced
754 
755 .seealso: `ISLocalToGlobalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
756           `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
757           `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
758 
759 @*/
760 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
761 {
762   PetscInt i,bs,Nmax;
763 
764   PetscFunctionBegin;
765   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
766   bs   = mapping->bs;
767   Nmax = bs*mapping->n;
768   if (bs == 1) {
769     const PetscInt *idx = mapping->indices;
770     for (i=0; i<N; i++) {
771       if (in[i] < 0) {
772         out[i] = in[i];
773         continue;
774       }
775       PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
776       out[i] = idx[in[i]];
777     }
778   } else {
779     const PetscInt *idx = mapping->indices;
780     for (i=0; i<N; i++) {
781       if (in[i] < 0) {
782         out[i] = in[i];
783         continue;
784       }
785       PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
786       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
787     }
788   }
789   PetscFunctionReturn(0);
790 }
791 
792 /*@
793    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
794 
795    Not collective
796 
797    Input Parameters:
798 +  mapping - the local to global mapping context
799 .  N - number of integers
800 -  in - input indices in local block numbering
801 
802    Output Parameter:
803 .  out - indices in global block numbering
804 
805    Notes:
806    The in and out array parameters may be identical.
807 
808    Example:
809      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
810      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
811 
812    Level: advanced
813 
814 .seealso: `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingDestroy()`,
815           `ISLocalToGlobalMappingApplyIS()`, `AOCreateBasic()`, `AOApplicationToPetsc()`,
816           `AOPetscToApplication()`, `ISGlobalToLocalMappingApply()`
817 
818 @*/
819 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
820 {
821   PetscInt       i,Nmax;
822   const PetscInt *idx;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
826   Nmax = mapping->n;
827   idx = mapping->indices;
828   for (i=0; i<N; i++) {
829     if (in[i] < 0) {
830       out[i] = in[i];
831       continue;
832     }
833     PetscCheck(in[i] < Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
834     out[i] = idx[in[i]];
835   }
836   PetscFunctionReturn(0);
837 }
838 
839 /*@
840     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
841     specified with a global numbering.
842 
843     Not collective
844 
845     Input Parameters:
846 +   mapping - mapping between local and global numbering
847 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
848            IS_GTOLM_DROP - drops the indices with no local value from the output list
849 .   n - number of global indices to map
850 -   idx - global indices to map
851 
852     Output Parameters:
853 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
854 -   idxout - local index of each global index, one must pass in an array long enough
855              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
856              idxout == NULL to determine the required length (returned in nout)
857              and then allocate the required space and call ISGlobalToLocalMappingApply()
858              a second time to set the values.
859 
860     Notes:
861     Either nout or idxout may be NULL. idx and idxout may be identical.
862 
863     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
864     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
865     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
866 
867     Level: advanced
868 
869     Developer Note: The manual page states that idx and idxout may be identical but the calling
870        sequence declares idx as const so it cannot be the same as idxout.
871 
872 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApplyBlock()`, `ISLocalToGlobalMappingCreate()`,
873           `ISLocalToGlobalMappingDestroy()`
874 @*/
875 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
876 {
877   PetscFunctionBegin;
878   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
879   if (!mapping->data) {
880     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
881   }
882   PetscCall((*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout));
883   PetscFunctionReturn(0);
884 }
885 
886 /*@
887     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
888     a new index set using the local numbering defined in an ISLocalToGlobalMapping
889     context.
890 
891     Not collective
892 
893     Input Parameters:
894 +   mapping - mapping between local and global numbering
895 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
896            IS_GTOLM_DROP - drops the indices with no local value from the output list
897 -   is - index set in global numbering
898 
899     Output Parameters:
900 .   newis - index set in local numbering
901 
902     Notes:
903     The output IS will be sequential, as it encodes a purely local operation
904 
905     Level: advanced
906 
907 .seealso: `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
908           `ISLocalToGlobalMappingDestroy()`
909 @*/
910 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
911 {
912   PetscInt       n,nout,*idxout;
913   const PetscInt *idxin;
914 
915   PetscFunctionBegin;
916   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
917   PetscValidHeaderSpecific(is,IS_CLASSID,3);
918   PetscValidPointer(newis,4);
919 
920   PetscCall(ISGetLocalSize(is,&n));
921   PetscCall(ISGetIndices(is,&idxin));
922   if (type == IS_GTOLM_MASK) {
923     PetscCall(PetscMalloc1(n,&idxout));
924   } else {
925     PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL));
926     PetscCall(PetscMalloc1(nout,&idxout));
927   }
928   PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout));
929   PetscCall(ISRestoreIndices(is,&idxin));
930   PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis));
931   PetscFunctionReturn(0);
932 }
933 
934 /*@
935     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
936     specified with a block global numbering.
937 
938     Not collective
939 
940     Input Parameters:
941 +   mapping - mapping between local and global numbering
942 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
943            IS_GTOLM_DROP - drops the indices with no local value from the output list
944 .   n - number of global indices to map
945 -   idx - global indices to map
946 
947     Output Parameters:
948 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
949 -   idxout - local index of each global index, one must pass in an array long enough
950              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
951              idxout == NULL to determine the required length (returned in nout)
952              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
953              a second time to set the values.
954 
955     Notes:
956     Either nout or idxout may be NULL. idx and idxout may be identical.
957 
958     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
959     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
960     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
961 
962     Level: advanced
963 
964     Developer Note: The manual page states that idx and idxout may be identical but the calling
965        sequence declares idx as const so it cannot be the same as idxout.
966 
967 .seealso: `ISLocalToGlobalMappingApply()`, `ISGlobalToLocalMappingApply()`, `ISLocalToGlobalMappingCreate()`,
968           `ISLocalToGlobalMappingDestroy()`
969 @*/
970 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
971                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
972 {
973   PetscFunctionBegin;
974   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
975   if (!mapping->data) {
976     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
977   }
978   PetscCall((*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout));
979   PetscFunctionReturn(0);
980 }
981 
982 /*@C
983     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
984      each index shared by more than one processor
985 
986     Collective on ISLocalToGlobalMapping
987 
988     Input Parameter:
989 .   mapping - the mapping from local to global indexing
990 
991     Output Parameters:
992 +   nproc - number of processors that are connected to this one
993 .   proc - neighboring processors
994 .   numproc - number of indices for each subdomain (processor)
995 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
996 
997     Level: advanced
998 
999     Fortran Usage:
1000 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1001 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1002           PetscInt indices[nproc][numprocmax],ierr)
1003         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1004         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1005 
1006 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1007           `ISLocalToGlobalMappingRestoreInfo()`
1008 @*/
1009 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1010 {
1011   PetscFunctionBegin;
1012   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1013   if (mapping->info_cached) {
1014     *nproc    = mapping->info_nproc;
1015     *procs    = mapping->info_procs;
1016     *numprocs = mapping->info_numprocs;
1017     *indices  = mapping->info_indices;
1018   } else {
1019     PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices));
1020   }
1021   PetscFunctionReturn(0);
1022 }
1023 
/*
   Computes, for each block index shared with other MPI ranks, the list of sharing
   ranks and the shared local indices (sorted by global number).  Three rounds of
   point-to-point communication are used:
     1. every rank sends its (global index, local position) pairs to the rank that
        "globally owns" that slice of the index space (a rendezvous decomposition),
     2. the rendezvous owners send back message lengths,
     3. the rendezvous owners send back, for each nontrivially shared node, the
        full list of sharing ranks.
   The result is cached on the mapping (info_* fields) for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  if (size == 1) {
    /* uniprocessor: nothing can be shared; return empty (but allocated) arrays */
    *nproc         = 0;
    *procs         = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag3));

  /* Ng = 1 + (maximum global index over all ranks): size of the global index space */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* rank r "globally owns" the contiguous slab [scale*r, scale*r+ng) of indices */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2*size,&nprocs));
  PetscCall(PetscArrayzero(nprocs,2*size));

  /* determine owners of each local node  */
  PetscCall(PetscMalloc1(n,&owner));
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscCall(PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm,nprocs,&nmax,&nrecvs));
  PetscCall(PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs));

  /* post receives for owned rows */
  /* each incoming message is at most 2*nmax integers: (global index, local position) pairs */
  PetscCall(PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs));
  PetscCall(PetscMalloc1(nrecvs+1,&recv_waits));
  for (i=0; i<nrecvs; i++) {
    PetscCallMPI(MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i));
  }

  /* pack messages containing lists of local nodes to owners */
  PetscCall(PetscMalloc1(2*n+1,&sends));
  PetscCall(PetscMalloc1(size+1,&starts));
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* packing advanced the starts[] offsets; rebuild them before sending */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends+1,&send_waits));
  PetscCall(PetscMalloc1(nsends+1,&dest));
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      PetscCallMPI(MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs+1,&source));
  PetscCall(PetscMalloc1(nrecvs+1,&len));
  cnt  = nrecvs;
  PetscCall(PetscCalloc1(ng+1,&nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm+1,&ownedsenders));
  PetscCall(PetscMalloc1(ng+1,&starts));
  /* starts[] is a CSR-style offset array over the nontrivially shared nodes */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill above advanced starts[]; rebuild before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart));
        for (j=0; j<nownedsenders[i]; j++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]));
        }
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends,&send_status));
    PetscCallMPI(MPI_Waitall(nsends,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* replies go to exactly the ranks we received from in round one */
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2+1,&nprocs)); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt+1,&sends2));
  PetscCall(PetscMalloc1(nsends2+1,&starts2));

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2+1,&lens2));
  PetscCall(PetscMalloc1(nrecvs2+1,&starts3));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i));
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm));
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  /* build displacement array starts3[] and total length nt for round-three receives */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  PetscCall(PetscMalloc1(nt+1,&recvs2));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i));
  }

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2+1,&send_waits));
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i));
  }

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]));
        for (k=0; k<recvs2[cnt+1]; k++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]));
        }
        cnt += 2 + recvs2[cnt+1];
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs[] is reused here: nprocs[p] = number of my shared nodes that rank p also owns */
  PetscCall(PetscCalloc1(size,&nprocs));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  PetscCall(PetscMalloc1(nt+1,procs));
  PetscCall(PetscMalloc1(nt+1,numprocs));
  PetscCall(PetscMalloc1(nt+1,indices));
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[] maps a global rank to its compressed position in the output arrays */
  PetscCall(PetscMalloc1(size,&bprocs));
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i],&(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs,nt));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i],&tmp));
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]));
      for (j=0; j<(*numprocs)[i]; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]));
      }
      PetscCall(PetscSynchronizedPrintf(comm,"\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2,&send_status));
    PetscCallMPI(MPI_Waitall(nsends2,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  /* NOTE(review): if *nproc == 0 these three reads touch the (unused) extra slot
     from the nt+1-sized allocations; the swap loop then never executes */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1430 
1431 /*@C
1432     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1433 
1434     Collective on ISLocalToGlobalMapping
1435 
1436     Input Parameter:
1437 .   mapping - the mapping from local to global indexing
1438 
1439     Output Parameters:
1440 +   nproc - number of processors that are connected to this one
1441 .   proc - neighboring processors
1442 .   numproc - number of indices for each processor
1443 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1444 
1445     Level: advanced
1446 
1447 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1448           `ISLocalToGlobalMappingGetInfo()`
1449 @*/
1450 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1451 {
1452   PetscFunctionBegin;
1453   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1454   if (mapping->info_free) {
1455     PetscCall(PetscFree(*numprocs));
1456     if (*indices) {
1457       PetscInt i;
1458 
1459       PetscCall(PetscFree((*indices)[0]));
1460       for (i=1; i<*nproc; i++) {
1461         PetscCall(PetscFree((*indices)[i]));
1462       }
1463       PetscCall(PetscFree(*indices));
1464     }
1465   }
1466   *nproc    = 0;
1467   *procs    = NULL;
1468   *numprocs = NULL;
1469   *indices  = NULL;
1470   PetscFunctionReturn(0);
1471 }
1472 
1473 /*@C
1474     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1475      each index shared by more than one processor
1476 
1477     Collective on ISLocalToGlobalMapping
1478 
1479     Input Parameter:
1480 .   mapping - the mapping from local to global indexing
1481 
1482     Output Parameters:
1483 +   nproc - number of processors that are connected to this one
1484 .   proc - neighboring processors
1485 .   numproc - number of indices for each subdomain (processor)
1486 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1487 
1488     Level: advanced
1489 
1490     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1491 
1492     Fortran Usage:
1493 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1494 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1495           PetscInt indices[nproc][numprocmax],ierr)
1496         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1497         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1498 
1499 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1500           `ISLocalToGlobalMappingRestoreInfo()`
1501 @*/
1502 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1503 {
1504   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs,i,j,k;
1505 
1506   PetscFunctionBegin;
1507   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1508   bs = mapping->bs;
1509   PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices));
1510   if (bs > 1) { /* we need to expand the cached info */
1511     PetscCall(PetscCalloc1(*nproc,&*indices));
1512     PetscCall(PetscCalloc1(*nproc,&*numprocs));
1513     for (i=0; i<*nproc; i++) {
1514       PetscCall(PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]));
1515       for (j=0; j<bnumprocs[i]; j++) {
1516         for (k=0; k<bs; k++) {
1517           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1518         }
1519       }
1520       (*numprocs)[i] = bnumprocs[i]*bs;
1521     }
1522     mapping->info_free = PETSC_TRUE;
1523   } else {
1524     *numprocs = bnumprocs;
1525     *indices  = bindices;
1526   }
1527   PetscFunctionReturn(0);
1528 }
1529 
1530 /*@C
1531     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1532 
1533     Collective on ISLocalToGlobalMapping
1534 
1535     Input Parameter:
1536 .   mapping - the mapping from local to global indexing
1537 
1538     Output Parameters:
1539 +   nproc - number of processors that are connected to this one
1540 .   proc - neighboring processors
1541 .   numproc - number of indices for each processor
1542 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1543 
1544     Level: advanced
1545 
1546 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1547           `ISLocalToGlobalMappingGetInfo()`
1548 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscFunctionBegin;
  /* GetInfo() either returned the cached block arrays (bs == 1) or expanded copies
     flagged via mapping->info_free; RestoreBlockInfo() handles both cases and
     zeroes the caller's handles */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices));
  PetscFunctionReturn(0);
}
1555 
1556 /*@C
1557     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1558 
1559     Collective on ISLocalToGlobalMapping
1560 
1561     Input Parameter:
1562 .   mapping - the mapping from local to global indexing
1563 
1564     Output Parameters:
+   nnodes - number of local nodes (same as the value returned by ISLocalToGlobalMappingGetSize())
1566 .   count - number of neighboring processors per node
1567 -   indices - indices of processes sharing the node (sorted)
1568 
1569     Level: advanced
1570 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1572 
1573 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1574           `ISLocalToGlobalMappingGetInfo()`, `ISLocalToGlobalMappingRestoreNodeInfo()`
1575 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping,&n));
  if (!mapping->info_nodec) {
    /* Build a CSR-like structure on first call: info_nodec[i] = number of ranks
       sharing local node i, info_nodei[i] = sorted list of those ranks.
       Derived from the per-neighbor shared-node lists returned by GetInfo(). */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    PetscCall(PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
    /* first pass: count entries per node (every node lists at least this rank) */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    /* skip i == 0: GetInfo() puts this rank itself in the first slot */
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all node lists; info_nodei[i] are offsets into it */
    if (n) PetscCall(PetscMalloc1(m,&mapping->info_nodei[0]));
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset counters and fill the lists */
    PetscCall(PetscArrayzero(mapping->info_nodec,n));
    /* NOTE(review): neigh[0] assumes n_neigh >= 1 (this rank listed first);
       confirm this holds on a single-rank communicator where nproc may be 0 */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each rank list and drop duplicates (a rank may share a node via several lists) */
    for (i=0;i<n;i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1619 
1620 /*@C
1621     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1622 
1623     Collective on ISLocalToGlobalMapping
1624 
1625     Input Parameter:
1626 .   mapping - the mapping from local to global indexing
1627 
1628     Output Parameters:
1629 +   nnodes - number of local nodes
1630 .   count - number of neighboring processors per node
1631 -   indices - indices of processes sharing the node (sorted)
1632 
1633     Level: advanced
1634 
1635 .seealso: `ISLocalToGlobalMappingDestroy()`, `ISLocalToGlobalMappingCreateIS()`, `ISLocalToGlobalMappingCreate()`,
1636           `ISLocalToGlobalMappingGetInfo()`
1637 @*/
1638 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1639 {
1640   PetscFunctionBegin;
1641   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1642   if (nnodes)  *nnodes  = 0;
1643   if (count)   *count   = NULL;
1644   if (indices) *indices = NULL;
1645   PetscFunctionReturn(0);
1646 }
1647 
1648 /*@C
1649    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1650 
1651    Not Collective
1652 
1653    Input Parameter:
1654 . ltog - local to global mapping
1655 
1656    Output Parameter:
1657 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1658 
1659    Level: advanced
1660 
1661    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1663 
1664 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreIndices()`, `ISLocalToGlobalMappingGetBlockIndices()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1665 @*/
1666 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1667 {
1668   PetscFunctionBegin;
1669   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1670   PetscValidPointer(array,2);
1671   if (ltog->bs == 1) {
1672     *array = ltog->indices;
1673   } else {
1674     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1675     const PetscInt *ii;
1676 
1677     PetscCall(PetscMalloc1(bs*n,&jj));
1678     *array = jj;
1679     k    = 0;
1680     ii   = ltog->indices;
1681     for (i=0; i<n; i++)
1682       for (j=0; j<bs; j++)
1683         jj[k++] = bs*ii[i] + j;
1684   }
1685   PetscFunctionReturn(0);
1686 }
1687 
1688 /*@C
1689    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1690 
1691    Not Collective
1692 
1693    Input Parameters:
1694 + ltog - local to global mapping
1695 - array - array of indices
1696 
1697    Level: advanced
1698 
1699 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1700 @*/
1701 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1702 {
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1705   PetscValidPointer(array,2);
1706   PetscCheck(ltog->bs != 1 || *array == ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1707 
1708   if (ltog->bs > 1) {
1709     PetscCall(PetscFree(*(void**)array));
1710   }
1711   PetscFunctionReturn(0);
1712 }
1713 
1714 /*@C
1715    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1716 
1717    Not Collective
1718 
1719    Input Parameter:
1720 . ltog - local to global mapping
1721 
1722    Output Parameter:
1723 . array - array of indices
1724 
1725    Level: advanced
1726 
1727 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingRestoreBlockIndices()`
1728 @*/
1729 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1730 {
1731   PetscFunctionBegin;
1732   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1733   PetscValidPointer(array,2);
1734   *array = ltog->indices;
1735   PetscFunctionReturn(0);
1736 }
1737 
1738 /*@C
1739    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1740 
1741    Not Collective
1742 
1743    Input Parameters:
1744 + ltog - local to global mapping
1745 - array - array of indices
1746 
1747    Level: advanced
1748 
1749 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingApply()`, `ISLocalToGlobalMappingGetIndices()`
1750 @*/
1751 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1752 {
1753   PetscFunctionBegin;
1754   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1755   PetscValidPointer(array,2);
1756   PetscCheck(*array == ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1757   *array = NULL;
1758   PetscFunctionReturn(0);
1759 }
1760 
1761 /*@C
1762    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1763 
1764    Not Collective
1765 
1766    Input Parameters:
1767 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1768 . n - number of mappings to concatenate
1769 - ltogs - local to global mappings
1770 
1771    Output Parameter:
1772 . ltogcat - new mapping
1773 
1774    Note: this currently always returns a mapping with block size of 1
1775 
1776    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1777 
1778    Level: advanced
1779 
1780 .seealso: `ISLocalToGlobalMappingCreate()`
1781 @*/
1782 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1783 {
1784   PetscInt       i,cnt,m,*idx;
1785 
1786   PetscFunctionBegin;
1787   PetscCheck(n >= 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1788   if (n > 0) PetscValidPointer(ltogs,3);
1789   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1790   PetscValidPointer(ltogcat,4);
1791   for (cnt=0,i=0; i<n; i++) {
1792     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1793     cnt += m;
1794   }
1795   PetscCall(PetscMalloc1(cnt,&idx));
1796   for (cnt=0,i=0; i<n; i++) {
1797     const PetscInt *subidx;
1798     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1799     PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx));
1800     PetscCall(PetscArraycpy(&idx[cnt],subidx,m));
1801     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx));
1802     cnt += m;
1803   }
1804   PetscCall(ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat));
1805   PetscFunctionReturn(0);
1806 }
1807 
1808 /*MC
1809       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1810                                     used this is good for only small and moderate size problems.
1811 
1812    Options Database Keys:
1813 .   -islocaltoglobalmapping_type basic - select this method
1814 
1815    Level: beginner
1816 
1817 .seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGHASH`
1818 M*/
1819 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1820 {
1821   PetscFunctionBegin;
1822   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1823   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1824   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1825   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1826   PetscFunctionReturn(0);
1827 }
1828 
1829 /*MC
1830       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1831                                     used this is good for large memory problems.
1832 
1833    Options Database Keys:
1834 .   -islocaltoglobalmapping_type hash - select this method
1835 
1836    Notes:
1837     This is selected automatically for large problems if the user does not set the type.
1838 
1839    Level: beginner
1840 
.seealso: `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`, `ISLOCALTOGLOBALMAPPINGBASIC`
1842 M*/
1843 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1844 {
1845   PetscFunctionBegin;
1846   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1847   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1848   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1849   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1850   PetscFunctionReturn(0);
1851 }
1852 
1853 /*@C
1854     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1855 
1856    Not Collective
1857 
1858    Input Parameters:
1859 +  sname - name of a new method
1860 -  routine_create - routine to create method context
1861 
1862    Notes:
1863    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1864 
1865    Sample usage:
1866 .vb
1867    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1868 .ve
1869 
1870    Then, your mapping can be chosen with the procedural interface via
1871 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1872    or at runtime via the option
1873 $     -islocaltoglobalmapping_type my_mapper
1874 
1875    Level: advanced
1876 
1877 .seealso: `ISLocalToGlobalMappingRegisterAll()`, `ISLocalToGlobalMappingRegisterDestroy()`, `ISLOCALTOGLOBALMAPPINGBASIC`, `ISLOCALTOGLOBALMAPPINGHASH`
1878 
1879 @*/
1880 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1881 {
1882   PetscFunctionBegin;
1883   PetscCall(ISInitializePackage());
1884   PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function));
1885   PetscFunctionReturn(0);
1886 }
1887 
1888 /*@C
1889    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1890 
1891    Logically Collective on ISLocalToGlobalMapping
1892 
1893    Input Parameters:
1894 +  ltog - the ISLocalToGlobalMapping object
1895 -  type - a known method
1896 
1897    Options Database Key:
1898 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1899     of available methods (for instance, basic or hash)
1900 
1901    Notes:
1902    See "petsc/include/petscis.h" for available methods
1903 
1904   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1905   then set the ISLocalToGlobalMapping type from the options database rather than by using
1906   this routine.
1907 
1908   Level: intermediate
1909 
1910   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1911   are accessed by ISLocalToGlobalMappingSetType().
1912 
1913 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingGetType()`
1914 @*/
1915 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1916 {
1917   PetscBool      match;
1918   PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;
1919 
1920   PetscFunctionBegin;
1921   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1922   if (type) PetscValidCharPointer(type,2);
1923 
1924   PetscCall(PetscObjectTypeCompare((PetscObject)ltog,type,&match));
1925   if (match) PetscFunctionReturn(0);
1926 
1927   /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1928   if (type) {
1929     PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r));
1930     PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1931   }
1932   /* Destroy the previous private LTOG context */
1933   if (ltog->ops->destroy) {
1934     PetscCall((*ltog->ops->destroy)(ltog));
1935     ltog->ops->destroy = NULL;
1936   }
1937   PetscCall(PetscObjectChangeTypeName((PetscObject)ltog,type));
1938   if (r) PetscCall((*r)(ltog));
1939   PetscFunctionReturn(0);
1940 }
1941 
1942 /*@C
1943    ISLocalToGlobalMappingGetType - Get the type of the l2g map
1944 
1945    Not Collective
1946 
1947    Input Parameter:
1948 .  ltog - the ISLocalToGlobalMapping object
1949 
1950    Output Parameter:
1951 .  type - the type
1952 
1953    Level: intermediate
1954 
1955 .seealso: `ISLocalToGlobalMappingType`, `ISLocalToGlobalMappingRegister()`, `ISLocalToGlobalMappingCreate()`, `ISLocalToGlobalMappingSetType()`
1956 @*/
1957 PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
1958 {
1959   PetscFunctionBegin;
1960   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1961   PetscValidPointer(type,2);
1962   *type = ((PetscObject)ltog)->type_name;
1963   PetscFunctionReturn(0);
1964 }
1965 
/* Set once ISLocalToGlobalMappingRegisterAll() has registered the built-in types */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1967 
1968 /*@C
1969   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1970 
1971   Not Collective
1972 
1973   Level: advanced
1974 
.seealso: `ISRegister()`, `ISLocalToGlobalMappingRegister()`
1976 @*/
1977 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1978 {
1979   PetscFunctionBegin;
1980   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1981   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1982   PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
1983   PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
1984   PetscFunctionReturn(0);
1985 }
1986