xref: /petsc/src/vec/is/utils/isltog.c (revision 5f64dca9724aa3d1c0232cee09f31525f14a35bc)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

/* BASIC global-to-local data: a dense lookup table covering [globalstart, globalend],
   globals[g - globalstart] = local block index (or -1); fast but memory scales with the range */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* HASH global-to-local data: hash table mapping global index -> local block index;
   memory scales with the number of indices, so it stays scalable for sparse ranges */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 
/*
  ISGetPointRange - Returns the range [*pStart, *pEnd) covered by pointIS and, when the set is
  not a unit stride, the explicit index array.

  On return either *points is NULL (pointIS is a stride with step 1, so the range alone
  describes it) or *points holds the indices obtained via ISGetIndices(); callers must pair
  this with ISRestorePointRange().
*/
PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
{
  PetscInt       numCells, step = 1;
  PetscBool      isStride;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  *pStart = 0;
  *points = NULL;
  ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
  /* for a stride IS the first entry and step fully determine the set */
  if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
  *pEnd   = *pStart + numCells;
  /* non-stride sets (and strides with step != 1) also hand back the explicit index array */
  if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
35 
36 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
37 {
38   PetscInt       step = 1;
39   PetscBool      isStride;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBeginHot;
43   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
44   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
45   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
46   PetscFunctionReturn(0);
47 }
48 
/*
  ISGetPointSubrange - Configures subpointIS to represent the subrange [pStart, pEnd) of a
  point range previously obtained with ISGetPointRange().

  If points is non-NULL, subpointIS references points[pStart..pEnd) without copying
  (PETSC_USE_POINTER, so points must outlive subpointIS); otherwise the subrange is the
  contiguous stride [pStart, pEnd) with step 1.
*/
PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
{
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  if (points) {
    ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
    ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
  } else {
    ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
    ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
63 
64 /* -----------------------------------------------------------------------------------------*/
65 
/*
    Creates the global mapping information in the ISLocalToGlobalMapping structure

    If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0);  /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* compute min/max of the nonnegative global indices; negative entries mean "no global value" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}  /* no valid indices: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;
  /* default type: dense table (BASIC) unless the covered range is much larger than n,
     in which case the scalable HASH implementation is used */
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4*n,1000000)) {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
    } else {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
    }
  }
  ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
99 
/* Build the BASIC (dense-table) global->local data: globals[g - globalstart] holds the local
   block index for global index g, or -1 when g has no local value */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  ierr             = PetscNew(&map);CHKERRQ(ierr);
  /* +2 rather than +1 — presumably keeps the allocation nonzero for an empty range (end == start-1) */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  map->globals     = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;  /* -1 marks "no local value" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
121 
/* Build the HASH global->local data: a hash table from global index to local block index */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  ierr = PetscNew(&map);CHKERRQ(ierr);
  ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
  for (i=0; i<n; i++ ) {
    if (idx[i] < 0) continue;  /* negative entries have no local value; not inserted */
    ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
  }
  mapping->data = (void*)map;
  /* rough memory estimate: one (key,value) pair of PetscInt per entry */
  ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
139 
140 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
141 {
142   PetscErrorCode              ierr;
143   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
144 
145   PetscFunctionBegin;
146   if (!map) PetscFunctionReturn(0);
147   ierr = PetscFree(map->globals);CHKERRQ(ierr);
148   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
153 {
154   PetscErrorCode              ierr;
155   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
156 
157   PetscFunctionBegin;
158   if (!map) PetscFunctionReturn(0);
159   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
160   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
/*
   <../src/vec/is/utils/isltog.h> acts as a template: each #include below instantiates one
   set of global-to-local apply routines.  The four macros are the template parameters
   (they are redefined before every inclusion, so the header presumably #undef's them after
   each use — confirm in isltog.h):
     GTOLTYPE - suffix of the data structure mapping->data is cast to (_Basic or _Hash)
     GTOLNAME - suffix appended to the generated function names
     GTOLBS   - block size the generated code works with (mapping->bs, or 1 for Block variants)
     GTOL     - statement performing a single global-to-local lookup
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* block variant of the BASIC lookup: indices are block indices, no bs scaling */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    local = bs*local + (g % bs);                    \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* block variant of the HASH lookup */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
198 
/*@
    ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   nltog - the duplicated local to global mapping

    Notes:
    The block size, number of block indices, and the indices themselves are copied
    (PETSC_COPY_VALUES); the type of the new mapping is left unset, and cached
    neighbor information is not duplicated.

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
223 
224 /*@
225     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
226 
227     Not Collective
228 
229     Input Parameter:
230 .   ltog - local to global mapping
231 
232     Output Parameter:
233 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
234 
235     Level: advanced
236 
237 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
238 @*/
239 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
240 {
241   PetscFunctionBegin;
242   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
243   PetscValidIntPointer(n,2);
244   *n = mapping->bs*mapping->n;
245   PetscFunctionReturn(0);
246 }
247 
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to stdout on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  /* only ASCII viewers are supported; other viewer types are silently ignored */
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* print "[rank] local global" lines, synchronized so each rank's output appears in order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
288 
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local number

    Output Parameter:
.   mapping - new mapping data structure

    Notes:
    the block size of the IS determines the block size of the mapping
    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  if (!isblock) {
    /* generic IS: one index per entry, block size 1 */
    ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  } else {
    /* block IS: ISGetLocalSize() counts individual entries, so there are n/bs block indices */
    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
334 
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* size of the dense local space: one past the largest leaf index, or nleaves when
     ilocal is NULL (leaves are the contiguous range 0..nleaves-1) */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  /* this process owns global indices [start, start+nroots); broadcast them to the leaves */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1;  /* slots not covered by any leaf remain -1 */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership transfers to the mapping; globals is temporary */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
379 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Notes:
    The new block size must be compatible with the stored indices: the total number of
    entries (n*obs) must be divisible by bs, and within each new block the expanded indices
    must either be bs consecutive values or all negative.

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);  /* nothing to do */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs;  /* number of blocks under the new block size */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* oid now holds the expanded (non-block) indices, bs*n entries; see ISLocalToGlobalMappingGetIndices() */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* verify entries of new block i are consecutive; cn counts the negative ("no value") entries */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;  /* last entry of the block (j == bs-1 after the loop) */
    if (cn) {
      /* a block must be entirely negative or entirely valid */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;  /* first expanded index of the block, converted to a block index */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* the per-type global->local data depends on bs, so discard it; it is rebuilt lazily */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
454 
455 /*@
456     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
457     ordering and a global parallel ordering.
458 
459     Not Collective
460 
461     Input Parameters:
462 .   mapping - mapping data structure
463 
464     Output Parameter:
465 .   bs - the blocksize
466 
467     Level: advanced
468 
469 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
470 @*/
471 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
472 {
473   PetscFunctionBegin;
474   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
475   *bs = mapping->bs;
476   PetscFunctionReturn(0);
477 }
478 
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes:
    There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
          ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);  /* indices may be NULL only for an empty mapping */
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  /* cached neighbor info is built on demand elsewhere; start empty */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  (*mapping)->info_nodec    = NULL;
  (*mapping)->info_nodei    = NULL;

  /* type-specific ops are filled in by ISLocalToGlobalMappingSetType() */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; it is freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
546 
/* Registry of ISLocalToGlobalMapping implementations; populated by ISLocalToGlobalMappingRegisterAll()
   and consulted in ISLocalToGlobalMappingSetFromOptions() */
PetscFunctionList ISLocalToGlobalMappingList = NULL;
548 
549 
/*@
   ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

   Not collective

   Input Parameters:
.  mapping - mapping data structure

   Options Database Key:
.  -islocaltoglobalmapping_type <basic,hash> - select the implementation type

   Level: advanced

@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* show the current type as the default when one has already been set */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
579 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* reference counted: only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry [0] is freed separately before the loop starting at 1 — presumably it may hold a
       single contiguous allocation; confirm against ISLocalToGlobalMappingGetBlockInfo_Private() */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*mapping)->info_nodei);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_nodec);CHKERRQ(ierr);
  /* release the type-specific (BASIC/HASH) global->local data, if built */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
625 
626 /*@
627     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
628     a new index set using the global numbering defined in an ISLocalToGlobalMapping
629     context.
630 
631     Collective on is
632 
633     Input Parameters:
634 +   mapping - mapping between local and global numbering
635 -   is - index set in local numbering
636 
637     Output Parameters:
638 .   newis - index set in global numbering
639 
640     Notes:
641     The output IS will have the same communicator of the input IS.
642 
643     Level: advanced
644 
645 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
646           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
647 @*/
648 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
649 {
650   PetscErrorCode ierr;
651   PetscInt       n,*idxout;
652   const PetscInt *idxin;
653 
654   PetscFunctionBegin;
655   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
656   PetscValidHeaderSpecific(is,IS_CLASSID,2);
657   PetscValidPointer(newis,3);
658 
659   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
660   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
661   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
662   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
663   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
664   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
665   PetscFunctionReturn(0);
666 }
667 
/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Negative input indices are passed through unchanged.

   Level: advanced

.seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs   = mapping->bs;
  Nmax = bs*mapping->n;  /* number of valid local (non-block) indices */
  if (bs == 1) {
    /* fast path: indices map one-to-one */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];  /* negative indices pass through unchanged */
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    /* blocked: idx holds one block index per block; expand as bs*block + offset-within-block */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}
723 
724 /*@
725    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
726 
727    Not collective
728 
729    Input Parameters:
730 +  mapping - the local to global mapping context
731 .  N - number of integers
732 -  in - input indices in local block numbering
733 
734    Output Parameter:
735 .  out - indices in global block numbering
736 
737    Notes:
738    The in and out array parameters may be identical.
739 
740    Example:
741      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
742      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
743 
744    Level: advanced
745 
746 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
747           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
748           AOPetscToApplication(), ISGlobalToLocalMappingApply()
749 
750 @*/
751 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
752 {
753 
754   PetscFunctionBegin;
755   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
756   {
757     PetscInt i,Nmax = mapping->n;
758     const PetscInt *idx = mapping->indices;
759 
760     for (i=0; i<N; i++) {
761       if (in[i] < 0) {
762         out[i] = in[i];
763         continue;
764       }
765       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
766       out[i] = idx[in[i]];
767     }
768   }
769   PetscFunctionReturn(0);
770 }
771 
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* build the global->local data structures lazily on first use; this also selects the
     BASIC or HASH implementation when none was set explicitly */
  if (!mapping->data) {
    ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
  }
  ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
820 
/*@
    ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
    a new index set using the local numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
-   is - index set in global numbering

    Output Parameters:
.   newis - index set in local numbering

    Notes:
    The output IS will be sequential, as it encodes a purely local operation

    Level: advanced

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* MASK keeps every entry (unmapped ones become -1), so the output length is exactly n */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* DROP: first pass with idxout == NULL just counts the mappable entries */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* purely local result, hence PETSC_COMM_SELF; idxout ownership moves into the IS */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
869 
870 /*@
871     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
872     specified with a block global numbering.
873 
874     Not collective
875 
876     Input Parameters:
877 +   mapping - mapping between local and global numbering
878 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
879            IS_GTOLM_DROP - drops the indices with no local value from the output list
880 .   n - number of global indices to map
881 -   idx - global indices to map
882 
883     Output Parameters:
884 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
885 -   idxout - local index of each global index, one must pass in an array long enough
886              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
887              idxout == NULL to determine the required length (returned in nout)
888              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
889              a second time to set the values.
890 
891     Notes:
892     Either nout or idxout may be NULL. idx and idxout may be identical.
893 
894     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
895     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
896     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
897 
898     Level: advanced
899 
900     Developer Note: The manual page states that idx and idxout may be identical but the calling
901        sequence declares idx as const so it cannot be the same as idxout.
902 
903 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
904           ISLocalToGlobalMappingDestroy()
905 @*/
906 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
907                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
908 {
909   PetscErrorCode ierr;
910 
911   PetscFunctionBegin;
912   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
913   if (!mapping->data) {
914     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
915   }
916   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
917   PetscFunctionReturn(0);
918 }
919 
920 
921 /*@C
922     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
923      each index shared by more than one processor
924 
925     Collective on ISLocalToGlobalMapping
926 
927     Input Parameters:
928 .   mapping - the mapping from local to global indexing
929 
930     Output Parameter:
931 +   nproc - number of processors that are connected to this one
932 .   proc - neighboring processors
933 .   numproc - number of indices for each subdomain (processor)
934 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
935 
936     Level: advanced
937 
938     Fortran Usage:
939 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
940 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
941           PetscInt indices[nproc][numprocmax],ierr)
942         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
943         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
944 
945 
946 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
947           ISLocalToGlobalMappingRestoreInfo()
948 @*/
949 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
950 {
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
955   if (mapping->info_cached) {
956     *nproc    = mapping->info_nproc;
957     *procs    = mapping->info_procs;
958     *numprocs = mapping->info_numprocs;
959     *indices  = mapping->info_indices;
960   } else {
961     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
962   }
963   PetscFunctionReturn(0);
964 }
965 
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  /*
     Computes, for each neighboring rank, the list of local block indices shared with it,
     and caches the result on the mapping.

     Algorithm sketch (two rounds of point-to-point messages):
       1. The global index range [0,Ng) is partitioned into contiguous chunks of width
          "scale", one chunk per rank ("global owners").
       2. Every rank sends each of its local indices (as the pair global-index, local-index)
          to that index's global owner (tag1).
       3. Each owner counts, for every index it owns, how many ranks reported it; indices
          reported by more than one rank are shared, and the owner sends the list of
          sharing ranks back to each reporter (lengths on tag2, payload on tag3).
       4. Each rank collates the replies into per-neighbor index lists, sorts each list by
          global numbering, and moves its own rank (if present) to slot 0 of the output.
  */
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; return empty (but allocated) arrays so callers may index [0] */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + largest global index on any rank; each rank globally owns a chunk of
     width "scale" starting at rstart (the last rank's chunk may be shorter) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows; each incoming message is a list of (global index, sender-local index) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild the message offsets for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* the filling loop above advanced starts[]; rebuild the offsets */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  /* nt = number of distinct neighboring subdomains */
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr); /* bprocs[rank] = position of rank in the compressed *procs list */
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1375 
1376 /*@C
1377     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1378 
1379     Collective on ISLocalToGlobalMapping
1380 
1381     Input Parameters:
1382 .   mapping - the mapping from local to global indexing
1383 
1384     Output Parameter:
1385 +   nproc - number of processors that are connected to this one
1386 .   proc - neighboring processors
1387 .   numproc - number of indices for each processor
1388 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1389 
1390     Level: advanced
1391 
1392 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1393           ISLocalToGlobalMappingGetInfo()
1394 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_free) {
    /* info_free is set only by ISLocalToGlobalMappingGetInfo() when bs > 1: those expanded
       numprocs/indices arrays are not cached on the mapping and must be freed here.
       *procs is deliberately not freed -- it still points at the cached info_procs. */
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* cached data (if any) stays with the mapping; only invalidate the caller's handles */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}
1419 
1420 /*@C
1421     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1422      each index shared by more than one processor
1423 
1424     Collective on ISLocalToGlobalMapping
1425 
1426     Input Parameters:
1427 .   mapping - the mapping from local to global indexing
1428 
1429     Output Parameter:
1430 +   nproc - number of processors that are connected to this one
1431 .   proc - neighboring processors
1432 .   numproc - number of indices for each subdomain (processor)
1433 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1434 
1435     Level: advanced
1436 
1437     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1438 
1439     Fortran Usage:
1440 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1441 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1442           PetscInt indices[nproc][numprocmax],ierr)
1443         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1444         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1445 
1446 
1447 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1448           ISLocalToGlobalMappingRestoreInfo()
1449 @*/
1450 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1451 {
1452   PetscErrorCode ierr;
1453   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1454 
1455   PetscFunctionBegin;
1456   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1457   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1458   if (bs > 1) { /* we need to expand the cached info */
1459     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1460     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1461     for (i=0; i<*nproc; i++) {
1462       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1463       for (j=0; j<bnumprocs[i]; j++) {
1464         for (k=0; k<bs; k++) {
1465           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1466         }
1467       }
1468       (*numprocs)[i] = bnumprocs[i]*bs;
1469     }
1470     mapping->info_free = PETSC_TRUE;
1471   } else {
1472     *numprocs = bnumprocs;
1473     *indices  = bindices;
1474   }
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 /*@C
1479     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1480 
1481     Collective on ISLocalToGlobalMapping
1482 
1483     Input Parameters:
1484 .   mapping - the mapping from local to global indexing
1485 
1486     Output Parameter:
1487 +   nproc - number of processors that are connected to this one
1488 .   proc - neighboring processors
1489 .   numproc - number of indices for each processor
1490 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1491 
1492     Level: advanced
1493 
1494 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1495           ISLocalToGlobalMappingGetInfo()
1496 @*/
1497 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1498 {
1499   PetscErrorCode ierr;
1500 
1501   PetscFunctionBegin;
1502   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1503   PetscFunctionReturn(0);
1504 }
1505 
1506 /*@C
1507     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1508 
1509     Collective on ISLocalToGlobalMapping
1510 
1511     Input Parameters:
1512 .   mapping - the mapping from local to global indexing
1513 
1514     Output Parameter:
1515 +   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
1516 .   count - number of neighboring processors per node
1517 -   indices - indices of processes sharing the node (sorted)
1518 
1519     Level: advanced
1520 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1522 
1523 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1524           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1525 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* Build a CSR-like structure: info_nodec[i] = number of ranks sharing local node i,
       info_nodei[i] = pointer into one contiguous buffer listing those ranks.
       Data is derived from the per-neighbor lists returned by GetInfo (entry 0 of
       which is this rank itself, hence the loops starting at i=1 below). */
    ierr = PetscCalloc1(n+1,&mapping->info_nodec);CHKERRQ(ierr); /* always allocate to flag setup */
    ierr = PetscMalloc1(n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count entries per node (every node has at least itself) -> m = total */
    for (i=0,m=0;i<n;i++) { mapping->info_nodec[i] = 1; m++; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation; info_nodei[i] are offsets into info_nodei[0] */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset counts and fill the rank lists */
    ierr = PetscMemzero(mapping->info_nodec,n*sizeof(PetscInt));CHKERRQ(ierr);
    /* NOTE(review): neigh[0] is read whenever n > 0; presumably n_neigh >= 1 (self entry)
       always holds here when n > 0 -- confirm for the uniprocessor case */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (adjusts info_nodec in place) */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all outputs are optional; returned arrays are cached and owned by the mapping */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1569 
1570 /*@C
1571     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1572 
1573     Collective on ISLocalToGlobalMapping
1574 
1575     Input Parameters:
1576 .   mapping - the mapping from local to global indexing
1577 
    Output Parameters:
1579 +   nnodes - number of local nodes
1580 .   count - number of neighboring processors per node
1581 -   indices - indices of processes sharing the node (sorted)
1582 
1583     Level: advanced
1584 
1585 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1586           ISLocalToGlobalMappingGetInfo()
1587 @*/
1588 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1589 {
1590   PetscFunctionBegin;
1591   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1592   if (nnodes)  *nnodes  = 0;
1593   if (count)   *count   = NULL;
1594   if (indices) *indices = NULL;
1595   PetscFunctionReturn(0);
1596 }
1597 
1598 /*@C
1599    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1600 
1601    Not Collective
1602 
1603    Input Arguments:
1604 . ltog - local to global mapping
1605 
1606    Output Arguments:
1607 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1608 
1609    Level: advanced
1610 
1611    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1613 
1614 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1615 @*/
1616 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1617 {
1618   PetscFunctionBegin;
1619   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1620   PetscValidPointer(array,2);
1621   if (ltog->bs == 1) {
1622     *array = ltog->indices;
1623   } else {
1624     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1625     const PetscInt *ii;
1626     PetscErrorCode ierr;
1627 
1628     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1629     *array = jj;
1630     k    = 0;
1631     ii   = ltog->indices;
1632     for (i=0; i<n; i++)
1633       for (j=0; j<bs; j++)
1634         jj[k++] = bs*ii[i] + j;
1635   }
1636   PetscFunctionReturn(0);
1637 }
1638 
1639 /*@C
1640    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1641 
1642    Not Collective
1643 
1644    Input Arguments:
1645 + ltog - local to global mapping
1646 - array - array of indices
1647 
1648    Level: advanced
1649 
1650 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1651 @*/
1652 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1653 {
1654   PetscFunctionBegin;
1655   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1656   PetscValidPointer(array,2);
1657   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1658 
1659   if (ltog->bs > 1) {
1660     PetscErrorCode ierr;
1661     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1662   }
1663   PetscFunctionReturn(0);
1664 }
1665 
1666 /*@C
1667    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1668 
1669    Not Collective
1670 
1671    Input Arguments:
1672 . ltog - local to global mapping
1673 
1674    Output Arguments:
1675 . array - array of indices
1676 
1677    Level: advanced
1678 
1679 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1680 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Borrowed reference to the internal block-index storage: no copy is made,
     so the caller must return it with ISLocalToGlobalMappingRestoreBlockIndices()
     and must not modify or free it. */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1689 
1690 /*@C
1691    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1692 
1693    Not Collective
1694 
1695    Input Arguments:
1696 + ltog - local to global mapping
1697 - array - array of indices
1698 
1699    Level: advanced
1700 
1701 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1702 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Get hands out the internal array itself; returning anything else is a caller error */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* nothing to free (the array is owned by the mapping); just invalidate the handle */
  *array = NULL;
  PetscFunctionReturn(0);
}
1712 
1713 /*@C
1714    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1715 
1716    Not Collective
1717 
1718    Input Arguments:
1719 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1720 . n - number of mappings to concatenate
1721 - ltogs - local to global mappings
1722 
1723    Output Arguments:
1724 . ltogcat - new mapping
1725 
1726    Note: this currently always returns a mapping with block size of 1
1727 
1728    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1729 
1730    Level: advanced
1731 
1732 .seealso: ISLocalToGlobalMappingCreate()
1733 @*/
1734 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1735 {
1736   PetscInt       i,cnt,m,*idx;
1737   PetscErrorCode ierr;
1738 
1739   PetscFunctionBegin;
1740   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1741   if (n > 0) PetscValidPointer(ltogs,3);
1742   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1743   PetscValidPointer(ltogcat,4);
1744   for (cnt=0,i=0; i<n; i++) {
1745     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1746     cnt += m;
1747   }
1748   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1749   for (cnt=0,i=0; i<n; i++) {
1750     const PetscInt *subidx;
1751     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1752     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1753     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1754     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1755     cnt += m;
1756   }
1757   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1758   PetscFunctionReturn(0);
1759 }
1760 
1761 /*MC
1762       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1763                                     used this is good for only small and moderate size problems.
1764 
1765    Options Database Keys:
.   -islocaltoglobalmapping_type basic - select this method
1767 
1768    Level: beginner
1769 
1770 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1771 M*/
/* Type constructor for ISLOCALTOGLOBALMAPPINGBASIC: installs the basic
   implementations of the global-to-local operations and the destroy hook. */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}
1781 
1782 /*MC
1783       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1784                                     used this is good for large memory problems.
1785 
1786    Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method
1788 
1789 
1790    Notes:
1791     This is selected automatically for large problems if the user does not set the type.
1792 
1793    Level: beginner
1794 
1795 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1796 M*/
/* Type constructor for ISLOCALTOGLOBALMAPPINGHASH: installs the hash-table
   implementations of the global-to-local operations and the destroy hook. */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}
1806 
1807 
1808 /*@C
1809     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1810 
1811    Not Collective
1812 
1813    Input Parameters:
1814 +  sname - name of a new method
1815 -  routine_create - routine to create method context
1816 
1817    Notes:
1818    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1819 
1820    Sample usage:
1821 .vb
1822    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1823 .ve
1824 
1825    Then, your mapping can be chosen with the procedural interface via
1826 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1827    or at runtime via the option
1828 $     -islocaltoglobalmapping_type my_mapper
1829 
1830    Level: advanced
1831 
1832 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1833 
1834 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the IS package (and hence the registry) is initialized before adding */
  ierr = ISInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1844 
1845 /*@C
1846    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1847 
1848    Logically Collective on ISLocalToGlobalMapping
1849 
1850    Input Parameters:
1851 +  ltog - the ISLocalToGlobalMapping object
1852 -  type - a known method
1853 
1854    Options Database Key:
1855 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1856     of available methods (for instance, basic or hash)
1857 
1858    Notes:
1859    See "petsc/include/petscis.h" for available methods
1860 
1861   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1862   then set the ISLocalToGlobalMapping type from the options database rather than by using
1863   this routine.
1864 
1865   Level: intermediate
1866 
1867   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1868   are accessed by ISLocalToGlobalMappingSetType().
1869 
1870 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1871 
1872 @*/
1873 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1874 {
1875   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1876   PetscBool      match;
1877 
1878   PetscFunctionBegin;
1879   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1880   PetscValidCharPointer(type,2);
1881 
1882   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1883   if (match) PetscFunctionReturn(0);
1884 
1885   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1886   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1887   /* Destroy the previous private LTOG context */
1888   if (ltog->ops->destroy) {
1889     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1890     ltog->ops->destroy = NULL;
1891   }
1892   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1893   ierr = (*r)(ltog);CHKERRQ(ierr);
1894   PetscFunctionReturn(0);
1895 }
1896 
1897 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1898 
1899 /*@C
1900   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1901 
1902   Not Collective
1903 
1904   Level: advanced
1905 
.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1907 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* idempotent: only the first call performs the registrations */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1920 
1921