xref: /petsc/src/vec/is/utils/isltog.c (revision 5c820da0a866dfd2d2d382a86bfaa6df0df955c6)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

/* Implementation data for ISLOCALTOGLOBALMAPPINGBASIC: a dense table indexed by
   (global index - globalstart) that stores the local index, or -1 when the
   global index has no local counterpart.  Fast but O(globalend-globalstart) memory. */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Implementation data for ISLOCALTOGLOBALMAPPINGHASH: a hash map from global
   index to local index.  Memory scales with the number of local entries, so it
   stays scalable for very large, sparse global ranges. */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47 
48   PetscFunctionBeginHot;
49   *pStart = 0;
50   *points = NULL;
51   PetscCall(ISGetLocalSize(pointIS, &numCells));
52   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
53   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
54   *pEnd   = *pStart + numCells;
55   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
56   PetscFunctionReturn(0);
57 }
58 
59 /*@C
60   ISRestorePointRange - Destroys the traversal description
61 
62   Not collective
63 
64   Input Parameters:
65 + pointIS - The IS object
66 . pStart  - The first index, from ISGetPointRange()
67 . pEnd    - One past the last index, from ISGetPointRange()
68 - points  - The indices, from ISGetPointRange()
69 
70   Notes:
71   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
72 $ ISGetPointRange(is, &pStart, &pEnd, &points);
73 $ for (p = pStart; p < pEnd; ++p) {
74 $   const PetscInt point = points ? points[p] : p;
75 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
77 
78   Level: intermediate
79 
80 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
81 @*/
82 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
83 {
84   PetscInt       step = 1;
85   PetscBool      isStride;
86 
87   PetscFunctionBeginHot;
88   PetscCall(PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride));
89   if (isStride) PetscCall(ISStrideGetInfo(pointIS, pStart, &step));
90   if (!isStride || step != 1) PetscCall(ISGetIndices(pointIS, points));
91   PetscFunctionReturn(0);
92 }
93 
94 /*@C
95   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
96 
97   Not collective
98 
99   Input Parameters:
100 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
102 . pEnd       - One past the last index for the subrange
103 - points     - The indices for the entire range, from ISGetPointRange()
104 
105   Output Parameters:
106 . subpointIS - The IS object now configured to be a subrange
107 
108   Notes:
109   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
110 
111   Level: intermediate
112 
113 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
114 @*/
115 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
116 {
117   PetscFunctionBeginHot;
118   if (points) {
119     PetscCall(ISSetType(subpointIS, ISGENERAL));
120     PetscCall(ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER));
121   } else {
122     PetscCall(ISSetType(subpointIS, ISSTRIDE));
123     PetscCall(ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1));
124   }
125   PetscFunctionReturn(0);
126 }
127 
128 /* -----------------------------------------------------------------------------------------*/
129 
130 /*
131     Creates the global mapping information in the ISLocalToGlobalMapping structure
132 
133     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
134 */
135 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
136 {
137   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
138 
139   PetscFunctionBegin;
140   if (mapping->data) PetscFunctionReturn(0);
141   end   = 0;
142   start = PETSC_MAX_INT;
143 
144   for (i=0; i<n; i++) {
145     if (idx[i] < 0) continue;
146     if (idx[i] < start) start = idx[i];
147     if (idx[i] > end)   end   = idx[i];
148   }
149   if (start > end) {start = 0; end = -1;}
150   mapping->globalstart = start;
151   mapping->globalend   = end;
152   if (!((PetscObject)mapping)->type_name) {
153     if ((end - start) > PetscMax(4*n,1000000)) {
154       PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH));
155     } else {
156       PetscCall(ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC));
157     }
158   }
159   if (mapping->ops->globaltolocalmappingsetup) PetscCall((*mapping->ops->globaltolocalmappingsetup)(mapping));
160   PetscFunctionReturn(0);
161 }
162 
163 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
164 {
165   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
166   ISLocalToGlobalMapping_Basic *map;
167 
168   PetscFunctionBegin;
169   start            = mapping->globalstart;
170   end              = mapping->globalend;
171   PetscCall(PetscNew(&map));
172   PetscCall(PetscMalloc1(end-start+2,&globals));
173   map->globals     = globals;
174   for (i=0; i<end-start+1; i++) globals[i] = -1;
175   for (i=0; i<n; i++) {
176     if (idx[i] < 0) continue;
177     globals[idx[i] - start] = i;
178   }
179   mapping->data = (void*)map;
180   PetscCall(PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt)));
181   PetscFunctionReturn(0);
182 }
183 
184 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
185 {
186   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
187   ISLocalToGlobalMapping_Hash *map;
188 
189   PetscFunctionBegin;
190   PetscCall(PetscNew(&map));
191   PetscCall(PetscHMapICreate(&map->globalht));
192   for (i=0; i<n; i++) {
193     if (idx[i] < 0) continue;
194     PetscCall(PetscHMapISet(map->globalht,idx[i],i));
195   }
196   mapping->data = (void*)map;
197   PetscCall(PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt)));
198   PetscFunctionReturn(0);
199 }
200 
201 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
202 {
203   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
204 
205   PetscFunctionBegin;
206   if (!map) PetscFunctionReturn(0);
207   PetscCall(PetscFree(map->globals));
208   PetscCall(PetscFree(mapping->data));
209   PetscFunctionReturn(0);
210 }
211 
212 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
213 {
214   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
215 
216   PetscFunctionBegin;
217   if (!map) PetscFunctionReturn(0);
218   PetscCall(PetscHMapIDestroy(&map->globalht));
219   PetscCall(PetscFree(mapping->data));
220   PetscFunctionReturn(0);
221 }
222 
/*
   Instantiate the global-to-local apply kernels from the shared template in
   isltog.h.  Each inclusion of the template consumes four macros:
     GTOLTYPE - suffix of the implementation data struct (_Basic or _Hash)
     GTOLNAME - suffix appended to the generated function names
     GTOLBS   - expression giving the block size the kernel uses
     GTOL     - statement mapping one global index g to a local index
   Each group below redefines the macros before re-including the template
   (presumably the template #undefs them after use -- TODO confirm in isltog.h).
*/

/* Dense-table lookup, point (unblocked) indices: table is keyed by block,
   so divide by bs and re-expand the within-block offset */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Dense-table lookup, block indices: block size is folded to 1 */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash-table lookup, point (unblocked) indices */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash-table lookup, block indices */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
257 
258 /*@
259     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
260 
261     Not Collective
262 
263     Input Parameter:
264 .   ltog - local to global mapping
265 
266     Output Parameter:
267 .   nltog - the duplicated local to global mapping
268 
269     Level: advanced
270 
271 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
272 @*/
273 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
274 {
275   ISLocalToGlobalMappingType l2gtype;
276 
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
279   PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog));
280   PetscCall(ISLocalToGlobalMappingGetType(ltog,&l2gtype));
281   PetscCall(ISLocalToGlobalMappingSetType(*nltog,l2gtype));
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
287 
288     Not Collective
289 
290     Input Parameter:
291 .   ltog - local to global mapping
292 
293     Output Parameter:
294 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
295 
296     Level: advanced
297 
298 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
299 @*/
300 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
301 {
302   PetscFunctionBegin;
303   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
304   PetscValidIntPointer(n,2);
305   *n = mapping->bs*mapping->n;
306   PetscFunctionReturn(0);
307 }
308 
309 /*@C
310    ISLocalToGlobalMappingViewFromOptions - View from Options
311 
312    Collective on ISLocalToGlobalMapping
313 
314    Input Parameters:
315 +  A - the local to global mapping object
316 .  obj - Optional object
317 -  name - command line option
318 
319    Level: intermediate
320 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
321 @*/
322 PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
323 {
324   PetscFunctionBegin;
325   PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
326   PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
327   PetscFunctionReturn(0);
328 }
329 
330 /*@C
331     ISLocalToGlobalMappingView - View a local to global mapping
332 
333     Not Collective
334 
335     Input Parameters:
336 +   ltog - local to global mapping
337 -   viewer - viewer
338 
339     Level: advanced
340 
341 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
342 @*/
343 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
344 {
345   PetscInt       i;
346   PetscMPIInt    rank;
347   PetscBool      iascii;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
351   if (!viewer) {
352     PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer));
353   }
354   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
355 
356   PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank));
357   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
358   if (iascii) {
359     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer));
360     PetscCall(PetscViewerASCIIPushSynchronized(viewer));
361     for (i=0; i<mapping->n; i++) {
362       PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]));
363     }
364     PetscCall(PetscViewerFlush(viewer));
365     PetscCall(PetscViewerASCIIPopSynchronized(viewer));
366   }
367   PetscFunctionReturn(0);
368 }
369 
370 /*@
371     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
372     ordering and a global parallel ordering.
373 
374     Not collective
375 
376     Input Parameter:
377 .   is - index set containing the global numbers for each local number
378 
379     Output Parameter:
380 .   mapping - new mapping data structure
381 
382     Notes:
383     the block size of the IS determines the block size of the mapping
384     Level: advanced
385 
386 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
387 @*/
388 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
389 {
390   PetscInt       n,bs;
391   const PetscInt *indices;
392   MPI_Comm       comm;
393   PetscBool      isblock;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(is,IS_CLASSID,1);
397   PetscValidPointer(mapping,2);
398 
399   PetscCall(PetscObjectGetComm((PetscObject)is,&comm));
400   PetscCall(ISGetLocalSize(is,&n));
401   PetscCall(PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock));
402   if (!isblock) {
403     PetscCall(ISGetIndices(is,&indices));
404     PetscCall(ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping));
405     PetscCall(ISRestoreIndices(is,&indices));
406   } else {
407     PetscCall(ISGetBlockSize(is,&bs));
408     PetscCall(ISBlockGetIndices(is,&indices));
409     PetscCall(ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping));
410     PetscCall(ISBlockRestoreIndices(is,&indices));
411   }
412   PetscFunctionReturn(0);
413 }
414 
415 /*@C
416     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
417     ordering and a global parallel ordering.
418 
419     Collective
420 
421     Input Parameters:
422 +   sf - star forest mapping contiguous local indices to (rank, offset)
423 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
424 
425     Output Parameter:
426 .   mapping - new mapping data structure
427 
428     Level: advanced
429 
430     Notes:
431     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
432 
433 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
434 @*/
435 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
436 {
437   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
438   MPI_Comm       comm;
439 
440   PetscFunctionBegin;
441   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
442   PetscValidPointer(mapping,3);
443   PetscCall(PetscObjectGetComm((PetscObject)sf,&comm));
444   PetscCall(PetscSFGetGraph(sf,&nroots,&nleaves,NULL,NULL));
445   if (start == PETSC_DECIDE) {
446     start = 0;
447     PetscCallMPI(MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm));
448   } else PetscCheck(start >= 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
449   PetscCall(PetscSFGetLeafRange(sf, NULL, &maxlocal));
450   ++maxlocal;
451   PetscCall(PetscMalloc1(nroots,&globals));
452   PetscCall(PetscMalloc1(maxlocal,&ltog));
453   for (i=0; i<nroots; i++) globals[i] = start + i;
454   for (i=0; i<maxlocal; i++) ltog[i] = -1;
455   PetscCall(PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
456   PetscCall(PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE));
457   PetscCall(ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping));
458   PetscCall(PetscFree(globals));
459   PetscFunctionReturn(0);
460 }
461 
462 /*@
463     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
464 
465     Not collective
466 
467     Input Parameters:
468 +   mapping - mapping data structure
469 -   bs - the blocksize
470 
471     Level: advanced
472 
473 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
474 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;   /* new block indices (length nn) */
  const PetscInt *oid;   /* old (unblocked) indices obtained from the mapping */
  PetscInt       i,cn,on,obs,nn;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheckFalse(bs < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  /* new number of block indices; total unblocked length on*obs must divide evenly */
  nn  = (on*obs)/bs;
  PetscCheckFalse((on*obs)%bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  PetscCall(PetscMalloc1(nn,&nid));
  /* GetIndices yields the expanded (unblocked) index array regardless of obs */
  PetscCall(ISLocalToGlobalMappingGetIndices(mapping,&oid));
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Each new block must be either bs consecutive global indices, or
       entirely negative (unmapped).  cn counts negatives within the block. */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheckFalse(oid[i*bs+j] != oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* The loop above checks pairs; the last entry's sign is checked here
       (for bs == 1 the loop body never runs and j == 0) */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* A mixed block (some negative, some not) cannot be represented */
      PetscCheckFalse(cn != bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreIndices(mapping,&oid));

  mapping->n           = nn;
  mapping->bs          = bs;
  PetscCall(PetscFree(mapping->indices));
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  PetscCall(PetscFree(mapping->info_procs));
  PetscCall(PetscFree(mapping->info_numprocs));
  if (mapping->info_indices) {
    PetscInt i;

    PetscCall(PetscFree((mapping->info_indices)[0]));
    for (i=1; i<mapping->info_nproc; i++) {
      PetscCall(PetscFree(mapping->info_indices[i]));
    }
    PetscCall(PetscFree(mapping->info_indices));
  }
  mapping->info_cached = PETSC_FALSE;

  /* Drop the type-specific global-to-local data; it is rebuilt lazily */
  if (mapping->ops->destroy) {
    PetscCall((*mapping->ops->destroy)(mapping));
  }
  PetscFunctionReturn(0);
}
535 
536 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
539 
540     Not Collective
541 
542     Input Parameters:
543 .   mapping - mapping data structure
544 
545     Output Parameter:
546 .   bs - the blocksize
547 
548     Level: advanced
549 
550 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
551 @*/
552 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
553 {
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
556   *bs = mapping->bs;
557   PetscFunctionReturn(0);
558 }
559 
560 /*@
561     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
562     ordering and a global parallel ordering.
563 
564     Not Collective, but communicator may have more than one process
565 
566     Input Parameters:
567 +   comm - MPI communicator
568 .   bs - the block size
569 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
570 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
571 -   mode - see PetscCopyMode
572 
573     Output Parameter:
574 .   mapping - new mapping data structure
575 
576     Notes:
577     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
578 
579     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
580     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
581     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
582 
583     Level: advanced
584 
585 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
586           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
587 @*/
588 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
589 {
590   PetscInt       *in;
591 
592   PetscFunctionBegin;
593   if (n) PetscValidIntPointer(indices,4);
594   PetscValidPointer(mapping,6);
595 
596   *mapping = NULL;
597   PetscCall(ISInitializePackage());
598 
599   PetscCall(PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView));
600   (*mapping)->n  = n;
601   (*mapping)->bs = bs;
602   if (mode == PETSC_COPY_VALUES) {
603     PetscCall(PetscMalloc1(n,&in));
604     PetscCall(PetscArraycpy(in,indices,n));
605     (*mapping)->indices = in;
606     (*mapping)->dealloc_indices = PETSC_TRUE;
607     PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
608   } else if (mode == PETSC_OWN_POINTER) {
609     (*mapping)->indices = (PetscInt*)indices;
610     (*mapping)->dealloc_indices = PETSC_TRUE;
611     PetscCall(PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt)));
612   } else if (mode == PETSC_USE_POINTER) {
613     (*mapping)->indices = (PetscInt*)indices;
614   }
615   else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
616   PetscFunctionReturn(0);
617 }
618 
619 PetscFunctionList ISLocalToGlobalMappingList = NULL;
620 
621 /*@
622    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
623 
624    Not collective
625 
626    Input Parameters:
627 .  mapping - mapping data structure
628 
629    Level: advanced
630 
631 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingRegisterAll());
  /* NOTE(review): Options{Begin,End} use the explicit ierr pattern instead of
     PetscCall() -- presumably because the macros expand to code PetscCall()
     cannot wrap directly; confirm against petscoptions.h before changing. */
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);PetscCall(ierr);
  /* Offer -islocaltoglobalmapping_type <basic|hash>; current type (if any) is the default */
  PetscCall(PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg));
  if (flg) {
    PetscCall(ISLocalToGlobalMappingSetType(mapping,type));
  }
  ierr = PetscOptionsEnd();PetscCall(ierr);
  PetscFunctionReturn(0);
}
650 
651 /*@
652    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
653    ordering and a global parallel ordering.
654 
   Not Collective
656 
657    Input Parameters:
658 .  mapping - mapping data structure
659 
660    Level: advanced
661 
662 .seealso: ISLocalToGlobalMappingCreate()
663 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Other references remain: just clear the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* Indices are freed only when owned (PETSC_COPY_VALUES / PETSC_OWN_POINTER) */
  if ((*mapping)->dealloc_indices) {
    PetscCall(PetscFree((*mapping)->indices));
  }
  /* Release the cached neighbor/process information */
  PetscCall(PetscFree((*mapping)->info_procs));
  PetscCall(PetscFree((*mapping)->info_numprocs));
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* Each info_indices slot is freed individually (slot 0 is freed first,
       then 1..info_nproc-1), followed by the array of pointers itself */
    PetscCall(PetscFree(((*mapping)->info_indices)[0]));
    for (i=1; i<(*mapping)->info_nproc; i++) {
      PetscCall(PetscFree(((*mapping)->info_indices)[i]));
    }
    PetscCall(PetscFree((*mapping)->info_indices));
  }
  /* info_nodei[0] holds the contiguous backing storage for the node index
     lists -- presumably allocated as one chunk; the pointer arrays themselves
     are released by the PetscFree2 below */
  if ((*mapping)->info_nodei) {
    PetscCall(PetscFree(((*mapping)->info_nodei)[0]));
  }
  PetscCall(PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei));
  /* Type-specific teardown (dense table or hash map) */
  if ((*mapping)->ops->destroy) {
    PetscCall((*(*mapping)->ops->destroy)(*mapping));
  }
  PetscCall(PetscHeaderDestroy(mapping));
  *mapping = NULL;
  PetscFunctionReturn(0);
}
695 
696 /*@
697     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
698     a new index set using the global numbering defined in an ISLocalToGlobalMapping
699     context.
700 
701     Collective on is
702 
703     Input Parameters:
704 +   mapping - mapping between local and global numbering
705 -   is - index set in local numbering
706 
707     Output Parameters:
708 .   newis - index set in global numbering
709 
710     Notes:
711     The output IS will have the same communicator of the input IS.
712 
713     Level: advanced
714 
715 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
716           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
717 @*/
718 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
719 {
720   PetscInt       n,*idxout;
721   const PetscInt *idxin;
722 
723   PetscFunctionBegin;
724   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
725   PetscValidHeaderSpecific(is,IS_CLASSID,2);
726   PetscValidPointer(newis,3);
727 
728   PetscCall(ISGetLocalSize(is,&n));
729   PetscCall(ISGetIndices(is,&idxin));
730   PetscCall(PetscMalloc1(n,&idxout));
731   PetscCall(ISLocalToGlobalMappingApply(mapping,n,idxin,idxout));
732   PetscCall(ISRestoreIndices(is,&idxin));
733   PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis));
734   PetscFunctionReturn(0);
735 }
736 
737 /*@
738    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
739    and converts them to the global numbering.
740 
741    Not collective
742 
743    Input Parameters:
744 +  mapping - the local to global mapping context
745 .  N - number of integers
746 -  in - input indices in local numbering
747 
748    Output Parameter:
749 .  out - indices in global numbering
750 
751    Notes:
752    The in and out array parameters may be identical.
753 
754    Level: advanced
755 
756 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
757           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
758           AOPetscToApplication(), ISGlobalToLocalMappingApply()
759 
760 @*/
761 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
762 {
763   PetscInt i,bs,Nmax;
764 
765   PetscFunctionBegin;
766   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
767   bs   = mapping->bs;
768   Nmax = bs*mapping->n;
769   if (bs == 1) {
770     const PetscInt *idx = mapping->indices;
771     for (i=0; i<N; i++) {
772       if (in[i] < 0) {
773         out[i] = in[i];
774         continue;
775       }
776       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
777       out[i] = idx[in[i]];
778     }
779   } else {
780     const PetscInt *idx = mapping->indices;
781     for (i=0; i<N; i++) {
782       if (in[i] < 0) {
783         out[i] = in[i];
784         continue;
785       }
786       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
787       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
788     }
789   }
790   PetscFunctionReturn(0);
791 }
792 
793 /*@
794    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
795 
796    Not collective
797 
798    Input Parameters:
799 +  mapping - the local to global mapping context
800 .  N - number of integers
801 -  in - input indices in local block numbering
802 
803    Output Parameter:
804 .  out - indices in global block numbering
805 
806    Notes:
807    The in and out array parameters may be identical.
808 
809    Example:
810      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
811      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
812 
813    Level: advanced
814 
815 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
816           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
817           AOPetscToApplication(), ISGlobalToLocalMappingApply()
818 
819 @*/
820 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
821 {
822   PetscInt       i,Nmax;
823   const PetscInt *idx;
824 
825   PetscFunctionBegin;
826   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
827   Nmax = mapping->n;
828   idx = mapping->indices;
829   for (i=0; i<N; i++) {
830     if (in[i] < 0) {
831       out[i] = in[i];
832       continue;
833     }
834     PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
835     out[i] = idx[in[i]];
836   }
837   PetscFunctionReturn(0);
838 }
839 
840 /*@
841     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
842     specified with a global numbering.
843 
844     Not collective
845 
846     Input Parameters:
847 +   mapping - mapping between local and global numbering
848 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
849            IS_GTOLM_DROP - drops the indices with no local value from the output list
850 .   n - number of global indices to map
851 -   idx - global indices to map
852 
853     Output Parameters:
854 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
855 -   idxout - local index of each global index, one must pass in an array long enough
856              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
857              idxout == NULL to determine the required length (returned in nout)
858              and then allocate the required space and call ISGlobalToLocalMappingApply()
859              a second time to set the values.
860 
861     Notes:
862     Either nout or idxout may be NULL. idx and idxout may be identical.
863 
864     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
865     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
866     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
867 
868     Level: advanced
869 
870     Developer Note: The manual page states that idx and idxout may be identical but the calling
871        sequence declares idx as const so it cannot be the same as idxout.
872 
873 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
874           ISLocalToGlobalMappingDestroy()
875 @*/
876 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
877 {
878   PetscFunctionBegin;
879   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
880   if (!mapping->data) {
881     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
882   }
883   PetscCall((*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout));
884   PetscFunctionReturn(0);
885 }
886 
887 /*@
888     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
889     a new index set using the local numbering defined in an ISLocalToGlobalMapping
890     context.
891 
892     Not collective
893 
894     Input Parameters:
895 +   mapping - mapping between local and global numbering
896 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
897            IS_GTOLM_DROP - drops the indices with no local value from the output list
898 -   is - index set in global numbering
899 
900     Output Parameters:
901 .   newis - index set in local numbering
902 
903     Notes:
904     The output IS will be sequential, as it encodes a purely local operation
905 
906     Level: advanced
907 
908 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
909           ISLocalToGlobalMappingDestroy()
910 @*/
911 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
912 {
913   PetscInt       n,nout,*idxout;
914   const PetscInt *idxin;
915 
916   PetscFunctionBegin;
917   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
918   PetscValidHeaderSpecific(is,IS_CLASSID,3);
919   PetscValidPointer(newis,4);
920 
921   PetscCall(ISGetLocalSize(is,&n));
922   PetscCall(ISGetIndices(is,&idxin));
923   if (type == IS_GTOLM_MASK) {
924     PetscCall(PetscMalloc1(n,&idxout));
925   } else {
926     PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL));
927     PetscCall(PetscMalloc1(nout,&idxout));
928   }
929   PetscCall(ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout));
930   PetscCall(ISRestoreIndices(is,&idxin));
931   PetscCall(ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis));
932   PetscFunctionReturn(0);
933 }
934 
935 /*@
936     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
937     specified with a block global numbering.
938 
939     Not collective
940 
941     Input Parameters:
942 +   mapping - mapping between local and global numbering
943 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
944            IS_GTOLM_DROP - drops the indices with no local value from the output list
945 .   n - number of global indices to map
946 -   idx - global indices to map
947 
948     Output Parameters:
949 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
950 -   idxout - local index of each global index, one must pass in an array long enough
951              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
952              idxout == NULL to determine the required length (returned in nout)
953              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
954              a second time to set the values.
955 
956     Notes:
957     Either nout or idxout may be NULL. idx and idxout may be identical.
958 
959     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
960     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
961     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
962 
963     Level: advanced
964 
965     Developer Note: The manual page states that idx and idxout may be identical but the calling
966        sequence declares idx as const so it cannot be the same as idxout.
967 
968 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
969           ISLocalToGlobalMappingDestroy()
970 @*/
971 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
972                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
973 {
974   PetscFunctionBegin;
975   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
976   if (!mapping->data) {
977     PetscCall(ISGlobalToLocalMappingSetUp(mapping));
978   }
979   PetscCall((*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout));
980   PetscFunctionReturn(0);
981 }
982 
983 /*@C
984     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
985      each index shared by more than one processor
986 
987     Collective on ISLocalToGlobalMapping
988 
989     Input Parameter:
990 .   mapping - the mapping from local to global indexing
991 
992     Output Parameters:
993 +   nproc - number of processors that are connected to this one
994 .   proc - neighboring processors
995 .   numproc - number of indices for each subdomain (processor)
996 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
997 
998     Level: advanced
999 
1000     Fortran Usage:
1001 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1002 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1003           PetscInt indices[nproc][numprocmax],ierr)
1004         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1005         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1006 
1007 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1008           ISLocalToGlobalMappingRestoreInfo()
1009 @*/
1010 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1011 {
1012   PetscFunctionBegin;
1013   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1014   if (mapping->info_cached) {
1015     *nproc    = mapping->info_nproc;
1016     *procs    = mapping->info_procs;
1017     *numprocs = mapping->info_numprocs;
1018     *indices  = mapping->info_indices;
1019   } else {
1020     PetscCall(ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices));
1021   }
1022   PetscFunctionReturn(0);
1023 }
1024 
/* Computes, for each neighboring process, the list of local (block) indices shared with it.
   Uses a rendezvous scheme: global indices are hashed onto ranks in contiguous chunks, each
   rank collects the senders for its chunk, and ships back, to every sender, the full list of
   ranks sharing each nontrivial index.  The result is cached in the mapping for reuse. */
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)mapping,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  if (size == 1) {
    /* serial: there are no neighbors; produce (and cache) empty info */
    *nproc         = 0;
    *procs         = NULL;
    PetscCall(PetscNew(numprocs));
    (*numprocs)[0] = 0;
    PetscCall(PetscNew(indices));
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL));

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag1));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag2));
  PetscCall(PetscObjectGetNewTag((PetscObject)mapping,&tag3));

  /* Ng = one more than the largest global index in use anywhere */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  PetscCall(MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm));
  Ng++;
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));
  /* partition [0,Ng) into contiguous chunks of length scale; this rank "globally owns" [rstart,rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  PetscCall(PetscMalloc1(2*size,&nprocs));
  PetscCall(PetscArrayzero(nprocs,2*size));

  /* determine owners of each local node */
  PetscCall(PetscMalloc1(n,&owner));
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscCall(PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends));

  /* inform other processors of number of messages and max length*/
  PetscCall(PetscMaxSum(comm,nprocs,&nmax,&nrecvs));
  PetscCall(PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs));

  /* post receives for owned rows */
  PetscCall(PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs));
  PetscCall(PetscMalloc1(nrecvs+1,&recv_waits));
  for (i=0; i<nrecvs; i++) {
    PetscCallMPI(MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i));
  }

  /* pack messages containing lists of local nodes to owners; each entry is a (global index, local position) pair */
  PetscCall(PetscMalloc1(2*n+1,&sends));
  PetscCall(PetscMalloc1(size+1,&starts));
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  PetscCall(PetscFree(owner));
  /* the packing loop advanced starts[]; rebuild it before using it as send offsets */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  PetscCall(PetscMalloc1(nsends+1,&send_waits));
  PetscCall(PetscMalloc1(nsends+1,&dest));
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      PetscCallMPI(MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt));
      dest[cnt] = i;
      cnt++;
    }
  }
  PetscCall(PetscFree(starts));

  /* wait on receives */
  PetscCall(PetscMalloc1(nrecvs+1,&source));
  PetscCall(PetscMalloc1(nrecvs+1,&len));
  cnt  = nrecvs;
  PetscCall(PetscCalloc1(ng+1,&nownedsenders));
  while (cnt) {
    PetscCallMPI(MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status));
    /* unpack receives into our local space */
    PetscCallMPI(MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]));
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  PetscCall(PetscFree(recv_waits));

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  PetscCall(PetscMalloc1(nownedm+1,&ownedsenders));
  PetscCall(PetscMalloc1(ng+1,&starts));
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart));
        for (j=0; j<nownedsenders[i]; j++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]));
        }
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    PetscCall(PetscMalloc1(nsends,&send_status));
    PetscCallMPI(MPI_Waitall(nsends,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }
  PetscCall(PetscFree(send_waits));
  PetscCall(PetscFree(sends));
  PetscCall(PetscFree(nprocs));

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  PetscCall(PetscMalloc1(nsends2+1,&nprocs)); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  PetscCall(PetscMalloc1(nt+1,&sends2));
  PetscCall(PetscMalloc1(nsends2+1,&starts2));

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        PetscCall(PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]));
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  PetscCall(PetscMalloc1(nrecvs2+1,&lens2));
  PetscCall(PetscMalloc1(nrecvs2+1,&starts3));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i));
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm));
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));

  /* starts3[] = receive offset for each incoming reply; nt = total reply length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  PetscCall(PetscMalloc1(nt+1,&recvs2));
  PetscCall(PetscMalloc1(nrecvs2+1,&recv_waits));
  for (i=0; i<nrecvs2; i++) {
    PetscCallMPI(MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i));
  }

  /* send the messages */
  PetscCall(PetscMalloc1(nsends2+1,&send_waits));
  for (i=0; i<nsends2; i++) {
    PetscCallMPI(MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i));
  }

  /* wait on receives */
  if (nrecvs2) {
    PetscCall(PetscMalloc1(nrecvs2,&recv_statuses));
    PetscCallMPI(MPI_Waitall(nrecvs2,recv_waits,recv_statuses));
    PetscCall(PetscFree(recv_statuses));
  }
  PetscCall(PetscFree(recv_waits));
  PetscCall(PetscFree(nprocs));

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]));
        for (k=0; k<recvs2[cnt+1]; k++) {
          PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]));
        }
        cnt += 2 + recvs2[cnt+1];
        PetscCall(PetscSynchronizedPrintf(comm,"\n"));
      }
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* count number subdomains for each local node; nprocs[p] = number of my nodes shared with rank p */
  PetscCall(PetscCalloc1(size,&nprocs));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  PetscCall(PetscMalloc1(nt+1,procs));
  PetscCall(PetscMalloc1(nt+1,numprocs));
  PetscCall(PetscMalloc1(nt+1,indices));
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[] maps a rank to its slot in the compressed output arrays */
  PetscCall(PetscMalloc1(size,&bprocs));
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      PetscCall(PetscMalloc1(nprocs[i],&(*indices)[cnt]));
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  PetscCall(PetscArrayzero(*numprocs,nt));
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  PetscCall(PetscFree(bprocs));
  PetscCall(PetscFree(recvs2));

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    PetscCall(PetscMalloc1((*numprocs)[i],&tmp));
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    PetscCall(PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]));
    PetscCall(PetscFree(tmp));
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      PetscCall(PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]));
      for (j=0; j<(*numprocs)[i]; j++) {
        PetscCall(PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]));
      }
      PetscCall(PetscSynchronizedPrintf(comm,"\n"));
    }
    PetscCall(PetscSynchronizedFlush(comm,PETSC_STDOUT));
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    PetscCall(PetscMalloc1(nsends2,&send_status));
    PetscCallMPI(MPI_Waitall(nsends2,send_waits,send_status));
    PetscCall(PetscFree(send_status));
  }

  PetscCall(PetscFree(starts3));
  PetscCall(PetscFree(dest));
  PetscCall(PetscFree(send_waits));

  PetscCall(PetscFree(nownedsenders));
  PetscCall(PetscFree(ownedsenders));
  PetscCall(PetscFree(starts));
  PetscCall(PetscFree(starts2));
  PetscCall(PetscFree(lens2));

  PetscCall(PetscFree(source));
  PetscCall(PetscFree(len));
  PetscCall(PetscFree(recvs));
  PetscCall(PetscFree(nprocs));
  PetscCall(PetscFree(sends2));

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1431 
1432 /*@C
1433     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1434 
1435     Collective on ISLocalToGlobalMapping
1436 
1437     Input Parameter:
1438 .   mapping - the mapping from local to global indexing
1439 
1440     Output Parameters:
1441 +   nproc - number of processors that are connected to this one
1442 .   proc - neighboring processors
1443 .   numproc - number of indices for each processor
1444 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1445 
1446     Level: advanced
1447 
1448 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1449           ISLocalToGlobalMappingGetInfo()
1450 @*/
1451 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1452 {
1453   PetscFunctionBegin;
1454   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1455   if (mapping->info_free) {
1456     PetscCall(PetscFree(*numprocs));
1457     if (*indices) {
1458       PetscInt i;
1459 
1460       PetscCall(PetscFree((*indices)[0]));
1461       for (i=1; i<*nproc; i++) {
1462         PetscCall(PetscFree((*indices)[i]));
1463       }
1464       PetscCall(PetscFree(*indices));
1465     }
1466   }
1467   *nproc    = 0;
1468   *procs    = NULL;
1469   *numprocs = NULL;
1470   *indices  = NULL;
1471   PetscFunctionReturn(0);
1472 }
1473 
1474 /*@C
1475     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1476      each index shared by more than one processor
1477 
1478     Collective on ISLocalToGlobalMapping
1479 
1480     Input Parameter:
1481 .   mapping - the mapping from local to global indexing
1482 
1483     Output Parameters:
1484 +   nproc - number of processors that are connected to this one
1485 .   proc - neighboring processors
1486 .   numproc - number of indices for each subdomain (processor)
1487 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1488 
1489     Level: advanced
1490 
1491     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1492 
1493     Fortran Usage:
1494 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1495 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1496           PetscInt indices[nproc][numprocmax],ierr)
1497         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1498         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1499 
1500 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1501           ISLocalToGlobalMappingRestoreInfo()
1502 @*/
1503 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1504 {
1505   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs,i,j,k;
1506 
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1509   bs = mapping->bs;
1510   PetscCall(ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices));
1511   if (bs > 1) { /* we need to expand the cached info */
1512     PetscCall(PetscCalloc1(*nproc,&*indices));
1513     PetscCall(PetscCalloc1(*nproc,&*numprocs));
1514     for (i=0; i<*nproc; i++) {
1515       PetscCall(PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]));
1516       for (j=0; j<bnumprocs[i]; j++) {
1517         for (k=0; k<bs; k++) {
1518           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1519         }
1520       }
1521       (*numprocs)[i] = bnumprocs[i]*bs;
1522     }
1523     mapping->info_free = PETSC_TRUE;
1524   } else {
1525     *numprocs = bnumprocs;
1526     *indices  = bindices;
1527   }
1528   PetscFunctionReturn(0);
1529 }
1530 
1531 /*@C
1532     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1533 
1534     Collective on ISLocalToGlobalMapping
1535 
1536     Input Parameter:
1537 .   mapping - the mapping from local to global indexing
1538 
1539     Output Parameters:
1540 +   nproc - number of processors that are connected to this one
1541 .   proc - neighboring processors
1542 .   numproc - number of indices for each processor
1543 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1544 
1545     Level: advanced
1546 
1547 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1548           ISLocalToGlobalMappingGetInfo()
1549 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscFunctionBegin;
  /* point info and block info follow the same ownership rules (info_free flag),
     so the block restore routine handles both cases */
  PetscCall(ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices));
  PetscFunctionReturn(0);
}
1556 
1557 /*@C
1558     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1559 
1560     Collective on ISLocalToGlobalMapping
1561 
1562     Input Parameter:
1563 .   mapping - the mapping from local to global indexing
1564 
1565     Output Parameters:
1566 +   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
1567 .   count - number of neighboring processors per node
1568 -   indices - indices of processes sharing the node (sorted)
1569 
1570     Level: advanced
1571 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1573 
1574 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1575           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1576 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCall(ISLocalToGlobalMappingGetSize(mapping,&n));
  if (!mapping->info_nodec) {
    /* Build (and cache) a CSR-like structure: info_nodec[i] = number of ranks
       sharing node i, info_nodei[i] = sorted list of those ranks.  Derived from
       the per-neighbor lists returned by ISLocalToGlobalMappingGetInfo(). */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    PetscCall(PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei));
    PetscCall(ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
    /* first pass: count entries per node (every node gets at least this rank itself) */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    /* entry 0 of the neighbor lists is this rank (see GetBlockInfo_Private), so start at 1 */
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation backs all rows; info_nodei[i] are offsets into info_nodei[0] */
    if (n) PetscCall(PetscMalloc1(m,&mapping->info_nodei[0]));
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset the counts and fill in the sharing ranks */
    PetscCall(PetscArrayzero(mapping->info_nodec,n));
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each rank list and drop duplicates (adjusts info_nodec accordingly) */
    for (i=0;i<n;i++) PetscCall(PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]));
    PetscCall(ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared));
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1620 
1621 /*@C
1622     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1623 
1624     Collective on ISLocalToGlobalMapping
1625 
1626     Input Parameter:
1627 .   mapping - the mapping from local to global indexing
1628 
1629     Output Parameters:
1630 +   nnodes - number of local nodes
1631 .   count - number of neighboring processors per node
1632 -   indices - indices of processes sharing the node (sorted)
1633 
1634     Level: advanced
1635 
1636 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1637           ISLocalToGlobalMappingGetInfo()
1638 @*/
1639 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1640 {
1641   PetscFunctionBegin;
1642   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1643   if (nnodes)  *nnodes  = 0;
1644   if (count)   *count   = NULL;
1645   if (indices) *indices = NULL;
1646   PetscFunctionReturn(0);
1647 }
1648 
1649 /*@C
1650    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1651 
1652    Not Collective
1653 
1654    Input Parameter:
1655 . ltog - local to global mapping
1656 
1657    Output Parameter:
1658 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1659 
1660    Level: advanced
1661 
1662    Notes:
1663     ISLocalToGlobalMappingGetSize() returns the length the this array
1664 
1665 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1666 @*/
1667 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1668 {
1669   PetscFunctionBegin;
1670   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1671   PetscValidPointer(array,2);
1672   if (ltog->bs == 1) {
1673     *array = ltog->indices;
1674   } else {
1675     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1676     const PetscInt *ii;
1677 
1678     PetscCall(PetscMalloc1(bs*n,&jj));
1679     *array = jj;
1680     k    = 0;
1681     ii   = ltog->indices;
1682     for (i=0; i<n; i++)
1683       for (j=0; j<bs; j++)
1684         jj[k++] = bs*ii[i] + j;
1685   }
1686   PetscFunctionReturn(0);
1687 }
1688 
1689 /*@C
1690    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1691 
1692    Not Collective
1693 
1694    Input Parameters:
1695 + ltog - local to global mapping
1696 - array - array of indices
1697 
1698    Level: advanced
1699 
1700 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1701 @*/
1702 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1703 {
1704   PetscFunctionBegin;
1705   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1706   PetscValidPointer(array,2);
1707   PetscCheckFalse(ltog->bs == 1 && *array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1708 
1709   if (ltog->bs > 1) {
1710     PetscCall(PetscFree(*(void**)array));
1711   }
1712   PetscFunctionReturn(0);
1713 }
1714 
1715 /*@C
1716    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1717 
1718    Not Collective
1719 
1720    Input Parameter:
1721 . ltog - local to global mapping
1722 
1723    Output Parameter:
1724 . array - array of indices
1725 
1726    Level: advanced
1727 
1728 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1729 @*/
1730 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1731 {
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1734   PetscValidPointer(array,2);
1735   *array = ltog->indices;
1736   PetscFunctionReturn(0);
1737 }
1738 
1739 /*@C
1740    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1741 
1742    Not Collective
1743 
1744    Input Parameters:
1745 + ltog - local to global mapping
1746 - array - array of indices
1747 
1748    Level: advanced
1749 
1750 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1751 @*/
1752 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1753 {
1754   PetscFunctionBegin;
1755   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1756   PetscValidPointer(array,2);
1757   PetscCheckFalse(*array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1758   *array = NULL;
1759   PetscFunctionReturn(0);
1760 }
1761 
1762 /*@C
1763    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1764 
1765    Not Collective
1766 
1767    Input Parameters:
1768 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1769 . n - number of mappings to concatenate
1770 - ltogs - local to global mappings
1771 
1772    Output Parameter:
1773 . ltogcat - new mapping
1774 
1775    Note: this currently always returns a mapping with block size of 1
1776 
1777    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1778 
1779    Level: advanced
1780 
1781 .seealso: ISLocalToGlobalMappingCreate()
1782 @*/
1783 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1784 {
1785   PetscInt       i,cnt,m,*idx;
1786 
1787   PetscFunctionBegin;
1788   PetscCheckFalse(n < 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1789   if (n > 0) PetscValidPointer(ltogs,3);
1790   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1791   PetscValidPointer(ltogcat,4);
1792   for (cnt=0,i=0; i<n; i++) {
1793     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1794     cnt += m;
1795   }
1796   PetscCall(PetscMalloc1(cnt,&idx));
1797   for (cnt=0,i=0; i<n; i++) {
1798     const PetscInt *subidx;
1799     PetscCall(ISLocalToGlobalMappingGetSize(ltogs[i],&m));
1800     PetscCall(ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx));
1801     PetscCall(PetscArraycpy(&idx[cnt],subidx,m));
1802     PetscCall(ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx));
1803     cnt += m;
1804   }
1805   PetscCall(ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat));
1806   PetscFunctionReturn(0);
1807 }
1808 
1809 /*MC
1810       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1811                                     used this is good for only small and moderate size problems.
1812 
1813    Options Database Keys:
1814 .   -islocaltoglobalmapping_type basic - select this method
1815 
1816    Level: beginner
1817 
1818 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1819 M*/
1820 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1821 {
1822   PetscFunctionBegin;
1823   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1824   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1825   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1826   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1827   PetscFunctionReturn(0);
1828 }
1829 
1830 /*MC
1831       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1832                                     used this is good for large memory problems.
1833 
1834    Options Database Keys:
1835 .   -islocaltoglobalmapping_type hash - select this method
1836 
1837    Notes:
1838     This is selected automatically for large problems if the user does not set the type.
1839 
1840    Level: beginner
1841 
1842 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1843 M*/
1844 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1845 {
1846   PetscFunctionBegin;
1847   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1848   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1849   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1850   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1851   PetscFunctionReturn(0);
1852 }
1853 
1854 /*@C
1855     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1856 
1857    Not Collective
1858 
1859    Input Parameters:
1860 +  sname - name of a new method
1861 -  routine_create - routine to create method context
1862 
1863    Notes:
1864    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1865 
1866    Sample usage:
1867 .vb
1868    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1869 .ve
1870 
1871    Then, your mapping can be chosen with the procedural interface via
1872 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1873    or at runtime via the option
1874 $     -islocaltoglobalmapping_type my_mapper
1875 
1876    Level: advanced
1877 
1878 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1879 
1880 @*/
1881 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1882 {
1883   PetscFunctionBegin;
1884   PetscCall(ISInitializePackage());
1885   PetscCall(PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function));
1886   PetscFunctionReturn(0);
1887 }
1888 
1889 /*@C
1890    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1891 
1892    Logically Collective on ISLocalToGlobalMapping
1893 
1894    Input Parameters:
1895 +  ltog - the ISLocalToGlobalMapping object
1896 -  type - a known method
1897 
1898    Options Database Key:
1899 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1900     of available methods (for instance, basic or hash)
1901 
1902    Notes:
1903    See "petsc/include/petscis.h" for available methods
1904 
1905   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1906   then set the ISLocalToGlobalMapping type from the options database rather than by using
1907   this routine.
1908 
1909   Level: intermediate
1910 
1911   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1912   are accessed by ISLocalToGlobalMappingSetType().
1913 
1914 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingGetType()
1915 @*/
1916 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1917 {
1918   PetscBool      match;
1919   PetscErrorCode (*r)(ISLocalToGlobalMapping) = NULL;
1920 
1921   PetscFunctionBegin;
1922   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1923   if (type) PetscValidCharPointer(type,2);
1924 
1925   PetscCall(PetscObjectTypeCompare((PetscObject)ltog,type,&match));
1926   if (match) PetscFunctionReturn(0);
1927 
1928   /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1929   if (type) {
1930     PetscCall(PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r));
1931     PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1932   }
1933   /* Destroy the previous private LTOG context */
1934   if (ltog->ops->destroy) {
1935     PetscCall((*ltog->ops->destroy)(ltog));
1936     ltog->ops->destroy = NULL;
1937   }
1938   PetscCall(PetscObjectChangeTypeName((PetscObject)ltog,type));
1939   if (r) PetscCall((*r)(ltog));
1940   PetscFunctionReturn(0);
1941 }
1942 
1943 /*@C
1944    ISLocalToGlobalMappingGetType - Get the type of the l2g map
1945 
1946    Not Collective
1947 
1948    Input Parameter:
1949 .  ltog - the ISLocalToGlobalMapping object
1950 
1951    Output Parameter:
1952 .  type - the type
1953 
1954    Level: intermediate
1955 
1956 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType()
1957 @*/
1958 PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
1959 {
1960   PetscFunctionBegin;
1961   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1962   PetscValidPointer(type,2);
1963   *type = ((PetscObject)ltog)->type_name;
1964   PetscFunctionReturn(0);
1965 }
1966 
1967 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1968 
1969 /*@C
1970   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1971 
1972   Not Collective
1973 
1974   Level: advanced
1975 
1976 .seealso:  ISRegister(),  ISLocalToGlobalRegister()
1977 @*/
1978 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1979 {
1980   PetscFunctionBegin;
1981   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1982   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1983   PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic));
1984   PetscCall(ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash));
1985   PetscFunctionReturn(0);
1986 }
1987