xref: /petsc/src/vec/is/utils/isltog.c (revision 89583661dbdda284bc23265230c2f308532cda40)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

/* Data for the BASIC global-to-local implementation: a dense lookup table
   indexed by (global index - mapping->globalstart), with -1 marking entries
   that have no local value (filled in ISGlobalToLocalMappingSetUp_Basic()) */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Hash_doc_anchor_basic_placeholder_do_not_use;

typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Data for the HASH global-to-local implementation: a hash map from global
   index to local index (filled in ISGlobalToLocalMappingSetUp_Hash());
   memory use scales with the number of indices, not with their range */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 
19 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
20 {
21   PetscInt       numCells, step = 1;
22   PetscBool      isStride;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBeginHot;
26   *pStart = 0;
27   *points = NULL;
28   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
29   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
30   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
31   *pEnd   = *pStart + numCells;
32   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
33   PetscFunctionReturn(0);
34 }
35 
36 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
37 {
38   PetscInt       step = 1;
39   PetscBool      isStride;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBeginHot;
43   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
44   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
45   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
46   PetscFunctionReturn(0);
47 }
48 
49 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
50 {
51   PetscErrorCode ierr;
52 
53   PetscFunctionBeginHot;
54   if (points) {
55     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
56     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
57   } else {
58     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
59     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
60   }
61   PetscFunctionReturn(0);
62 }
63 
64 /* -----------------------------------------------------------------------------------------*/
65 
66 /*
67     Creates the global mapping information in the ISLocalToGlobalMapping structure
68 
69     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
70 */
71 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
72 {
73   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
74   PetscErrorCode ierr;
75 
76   PetscFunctionBegin;
77   if (mapping->data) PetscFunctionReturn(0);
78   end   = 0;
79   start = PETSC_MAX_INT;
80 
81   for (i=0; i<n; i++) {
82     if (idx[i] < 0) continue;
83     if (idx[i] < start) start = idx[i];
84     if (idx[i] > end)   end   = idx[i];
85   }
86   if (start > end) {start = 0; end = -1;}
87   mapping->globalstart = start;
88   mapping->globalend   = end;
89   if (!((PetscObject)mapping)->type_name) {
90     if ((end - start) > PetscMax(4*n,1000000)) {
91       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
92     } else {
93       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
94     }
95   }
96   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
97   PetscFunctionReturn(0);
98 }
99 
100 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
101 {
102   PetscErrorCode              ierr;
103   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
104   ISLocalToGlobalMapping_Basic *map;
105 
106   PetscFunctionBegin;
107   start            = mapping->globalstart;
108   end              = mapping->globalend;
109   ierr             = PetscNew(&map);CHKERRQ(ierr);
110   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
111   map->globals     = globals;
112   for (i=0; i<end-start+1; i++) globals[i] = -1;
113   for (i=0; i<n; i++) {
114     if (idx[i] < 0) continue;
115     globals[idx[i] - start] = i;
116   }
117   mapping->data = (void*)map;
118   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
123 {
124   PetscErrorCode              ierr;
125   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
126   ISLocalToGlobalMapping_Hash *map;
127 
128   PetscFunctionBegin;
129   ierr = PetscNew(&map);CHKERRQ(ierr);
130   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
131   for (i=0; i<n; i++ ) {
132     if (idx[i] < 0) continue;
133     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
134   }
135   mapping->data = (void*)map;
136   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
140 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
141 {
142   PetscErrorCode              ierr;
143   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
144 
145   PetscFunctionBegin;
146   if (!map) PetscFunctionReturn(0);
147   ierr = PetscFree(map->globals);CHKERRQ(ierr);
148   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
153 {
154   PetscErrorCode              ierr;
155   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
156 
157   PetscFunctionBegin;
158   if (!map) PetscFunctionReturn(0);
159   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
160   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
/*
   The four ISGlobalToLocalMappingApply* implementations (regular and block
   variants, for the Basic and Hash types) are generated by including the
   template isltog.h once per configuration.  Before each inclusion we define:
     GTOLTYPE - suffix of the per-type data struct (ISLocalToGlobalMapping##GTOLTYPE)
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size the generated code should use
     GTOL     - statement translating one global index g to a local index
   NOTE(review): the template presumably #undefs these macros between
   inclusions -- confirm against isltog.h.
*/

/* Basic, entry-wise: dense table lookup on the block, then re-expand by bs */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Basic, block-wise: direct dense table lookup, block size fixed at 1 */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, entry-wise: hash lookup on the block, then re-expand by bs */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    local = bs*local + (g % bs);                    \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, block-wise: direct hash lookup, block size fixed at 1 */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
198 
199 /*@
200     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
201 
202     Not Collective
203 
204     Input Parameter:
205 .   ltog - local to global mapping
206 
207     Output Parameter:
208 .   nltog - the duplicated local to global mapping
209 
210     Level: advanced
211 
212 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
213 @*/
214 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
215 {
216   PetscErrorCode ierr;
217 
218   PetscFunctionBegin;
219   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
220   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
221   PetscFunctionReturn(0);
222 }
223 
224 /*@
225     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
226 
227     Not Collective
228 
229     Input Parameter:
230 .   ltog - local to global mapping
231 
232     Output Parameter:
233 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
234 
235     Level: advanced
236 
237 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
238 @*/
239 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
240 {
241   PetscFunctionBegin;
242   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
243   PetscValidIntPointer(n,2);
244   *n = mapping->bs*mapping->n;
245   PetscFunctionReturn(0);
246 }
247 
248 /*@C
249     ISLocalToGlobalMappingView - View a local to global mapping
250 
251     Not Collective
252 
253     Input Parameters:
254 +   ltog - local to global mapping
255 -   viewer - viewer
256 
257     Level: advanced
258 
259 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
260 @*/
261 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
262 {
263   PetscInt       i;
264   PetscMPIInt    rank;
265   PetscBool      iascii;
266   PetscErrorCode ierr;
267 
268   PetscFunctionBegin;
269   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
270   if (!viewer) {
271     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
272   }
273   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
274 
275   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
276   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
277   if (iascii) {
278     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
279     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
280     for (i=0; i<mapping->n; i++) {
281       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
282     }
283     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
284     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
285   }
286   PetscFunctionReturn(0);
287 }
288 
289 /*@
290     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
291     ordering and a global parallel ordering.
292 
293     Not collective
294 
295     Input Parameter:
296 .   is - index set containing the global numbers for each local number
297 
298     Output Parameter:
299 .   mapping - new mapping data structure
300 
301     Notes:
302     the block size of the IS determines the block size of the mapping
303     Level: advanced
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
306 @*/
307 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
308 {
309   PetscErrorCode ierr;
310   PetscInt       n,bs;
311   const PetscInt *indices;
312   MPI_Comm       comm;
313   PetscBool      isblock;
314 
315   PetscFunctionBegin;
316   PetscValidHeaderSpecific(is,IS_CLASSID,1);
317   PetscValidPointer(mapping,2);
318 
319   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
320   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
321   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
322   if (!isblock) {
323     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
324     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
325     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
326   } else {
327     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
328     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
329     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
330     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
331   }
332   PetscFunctionReturn(0);
333 }
334 
335 /*@C
336     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
337     ordering and a global parallel ordering.
338 
339     Collective
340 
341     Input Parameter:
342 +   sf - star forest mapping contiguous local indices to (rank, offset)
343 -   start - first global index on this process
344 
345     Output Parameter:
346 .   mapping - new mapping data structure
347 
348     Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
351 @*/
352 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
353 {
354   PetscErrorCode ierr;
355   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
356   const PetscInt *ilocal;
357   MPI_Comm       comm;
358 
359   PetscFunctionBegin;
360   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
361   PetscValidPointer(mapping,3);
362 
363   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
364   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
365   if (ilocal) {
366     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
367   }
368   else maxlocal = nleaves;
369   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
370   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
371   for (i=0; i<nroots; i++) globals[i] = start + i;
372   for (i=0; i<maxlocal; i++) ltog[i] = -1;
373   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
374   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
375   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
376   ierr = PetscFree(globals);CHKERRQ(ierr);
377   PetscFunctionReturn(0);
378 }
379 
380 /*@
381     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
382 
383     Not collective
384 
385     Input Parameters:
386 .   mapping - mapping data structure
387 .   bs - the blocksize
388 
389     Level: advanced
390 
391 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
392 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;   /* new block-index array */
  const PetscInt *oid;   /* old indices, expanded to block size 1 below */
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);   /* already at the requested block size */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs;   /* number of blocks under the new block size */
  /* total entry count on*obs must be divisible by the new block size */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* get the fully expanded (block size 1) index list to re-block from */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* cn counts negative (unmapped) entries in block i; the nonnegative
       entries must be consecutive for the block to be representable */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;   /* account for the last entry of the block */
    if (cn) {
      /* a block must be entirely negative or entirely nonnegative */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;   /* global block index of the block's first entry */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* NOTE(review): entry [0] is freed separately from [1..info_nproc) --
       it appears to hold a distinct allocation; verify against the code
       that builds info_indices */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* discard the type-specific global-to-local data; it is rebuilt on demand */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
454 
455 /*@
456     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
457     ordering and a global parallel ordering.
458 
459     Not Collective
460 
461     Input Parameters:
462 .   mapping - mapping data structure
463 
464     Output Parameter:
465 .   bs - the blocksize
466 
467     Level: advanced
468 
469 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
470 @*/
471 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
472 {
473   PetscFunctionBegin;
474   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
475   *bs = mapping->bs;
476   PetscFunctionReturn(0);
477 }
478 
479 /*@
480     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
481     ordering and a global parallel ordering.
482 
483     Not Collective, but communicator may have more than one process
484 
485     Input Parameters:
486 +   comm - MPI communicator
487 .   bs - the block size
488 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
489 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
490 -   mode - see PetscCopyMode
491 
492     Output Parameter:
493 .   mapping - new mapping data structure
494 
495     Notes:
496     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
497 
498     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
499     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
500     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
501 
502     Level: advanced
503 
504 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
505           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
506 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);   /* indices may be NULL only when empty */
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  /* n counts blocks; each block index stands for bs consecutive entries */
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  /* the cached process/index information (info_*) starts empty */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  (*mapping)->info_nodec    = NULL;
  (*mapping)->info_nodei    = NULL;

  /* no type chosen yet: the global-to-local ops stay NULL until a type is set */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; it is freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
546 
/* Registry of available ISLocalToGlobalMapping implementations, consulted by
   ISLocalToGlobalMappingSetFromOptions() and filled by ISLocalToGlobalMappingRegisterAll() */
PetscFunctionList ISLocalToGlobalMappingList = NULL;
548 
549 
550 /*@
551    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
552 
553    Not collective
554 
555    Input Parameters:
556 .  mapping - mapping data structure
557 
558    Level: advanced
559 
560 @*/
561 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
562 {
563   PetscErrorCode             ierr;
564   char                       type[256];
565   ISLocalToGlobalMappingType defaulttype = "Not set";
566   PetscBool                  flg;
567 
568   PetscFunctionBegin;
569   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
570   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
571   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
572   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
573   if (flg) {
574     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
575   }
576   ierr = PetscOptionsEnd();CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 /*@
581    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
582    ordering and a global parallel ordering.
583 
584    Note Collective
585 
586    Input Parameters:
587 .  mapping - mapping data structure
588 
589    Level: advanced
590 
591 .seealso: ISLocalToGlobalMappingCreate()
592 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);   /* destroying NULL is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the final reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* NOTE(review): entry [0] is freed separately from [1..info_nproc) --
       it appears to hold a distinct allocation; verify against the code
       that builds info_indices */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*mapping)->info_nodei);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_nodec);CHKERRQ(ierr);
  /* let the type-specific implementation release its data (dense table or hash) */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
625 
626 /*@
627     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
628     a new index set using the global numbering defined in an ISLocalToGlobalMapping
629     context.
630 
631     Collective on is
632 
633     Input Parameters:
634 +   mapping - mapping between local and global numbering
635 -   is - index set in local numbering
636 
637     Output Parameters:
638 .   newis - index set in global numbering
639 
640     Notes:
641     The output IS will have the same communicator of the input IS.
642 
643     Level: advanced
644 
645 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
646           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
647 @*/
648 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
649 {
650   PetscErrorCode ierr;
651   PetscInt       n,*idxout;
652   const PetscInt *idxin;
653 
654   PetscFunctionBegin;
655   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
656   PetscValidHeaderSpecific(is,IS_CLASSID,2);
657   PetscValidPointer(newis,3);
658 
659   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
660   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
661   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
662   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
663   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
664   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
665   PetscFunctionReturn(0);
666 }
667 
668 /*@
669    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
670    and converts them to the global numbering.
671 
672    Not collective
673 
674    Input Parameters:
675 +  mapping - the local to global mapping context
676 .  N - number of integers
677 -  in - input indices in local numbering
678 
679    Output Parameter:
680 .  out - indices in global numbering
681 
682    Notes:
683    The in and out array parameters may be identical.
684 
685    Level: advanced
686 
687 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
688           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
689           AOPetscToApplication(), ISGlobalToLocalMappingApply()
690 
691 @*/
692 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
693 {
694   PetscInt i,bs,Nmax;
695 
696   PetscFunctionBegin;
697   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
698   bs   = mapping->bs;
699   Nmax = bs*mapping->n;
700   if (bs == 1) {
701     const PetscInt *idx = mapping->indices;
702     for (i=0; i<N; i++) {
703       if (in[i] < 0) {
704         out[i] = in[i];
705         continue;
706       }
707       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
708       out[i] = idx[in[i]];
709     }
710   } else {
711     const PetscInt *idx = mapping->indices;
712     for (i=0; i<N; i++) {
713       if (in[i] < 0) {
714         out[i] = in[i];
715         continue;
716       }
717       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
718       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
719     }
720   }
721   PetscFunctionReturn(0);
722 }
723 
724 /*@
725    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
726 
727    Not collective
728 
729    Input Parameters:
730 +  mapping - the local to global mapping context
731 .  N - number of integers
732 -  in - input indices in local block numbering
733 
734    Output Parameter:
735 .  out - indices in global block numbering
736 
737    Notes:
738    The in and out array parameters may be identical.
739 
740    Example:
741      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
742      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
743 
744    Level: advanced
745 
746 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
747           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
748           AOPetscToApplication(), ISGlobalToLocalMappingApply()
749 
750 @*/
751 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
752 {
753 
754   PetscFunctionBegin;
755   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
756   {
757     PetscInt i,Nmax = mapping->n;
758     const PetscInt *idx = mapping->indices;
759 
760     for (i=0; i<N; i++) {
761       if (in[i] < 0) {
762         out[i] = in[i];
763         continue;
764       }
765       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
766       out[i] = idx[in[i]];
767     }
768   }
769   PetscFunctionReturn(0);
770 }
771 
772 /*@
773     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
774     specified with a global numbering.
775 
776     Not collective
777 
778     Input Parameters:
779 +   mapping - mapping between local and global numbering
780 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
781            IS_GTOLM_DROP - drops the indices with no local value from the output list
782 .   n - number of global indices to map
783 -   idx - global indices to map
784 
785     Output Parameters:
786 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
787 -   idxout - local index of each global index, one must pass in an array long enough
788              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
789              idxout == NULL to determine the required length (returned in nout)
790              and then allocate the required space and call ISGlobalToLocalMappingApply()
791              a second time to set the values.
792 
793     Notes:
794     Either nout or idxout may be NULL. idx and idxout may be identical.
795 
796     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
797     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
798     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
799 
800     Level: advanced
801 
802     Developer Note: The manual page states that idx and idxout may be identical but the calling
803        sequence declares idx as const so it cannot be the same as idxout.
804 
805 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
806           ISLocalToGlobalMappingDestroy()
807 @*/
808 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
809 {
810   PetscErrorCode ierr;
811 
812   PetscFunctionBegin;
813   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
814   if (!mapping->data) {
815     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
816   }
817   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
818   PetscFunctionReturn(0);
819 }
820 
821 /*@
822     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
823     a new index set using the local numbering defined in an ISLocalToGlobalMapping
824     context.
825 
826     Not collective
827 
828     Input Parameters:
829 +   mapping - mapping between local and global numbering
830 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
831            IS_GTOLM_DROP - drops the indices with no local value from the output list
832 -   is - index set in global numbering
833 
834     Output Parameters:
835 .   newis - index set in local numbering
836 
837     Notes:
838     The output IS will be sequential, as it encodes a purely local operation
839 
840     Level: advanced
841 
842 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
843           ISLocalToGlobalMappingDestroy()
844 @*/
845 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
846 {
847   PetscErrorCode ierr;
848   PetscInt       n,nout,*idxout;
849   const PetscInt *idxin;
850 
851   PetscFunctionBegin;
852   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
853   PetscValidHeaderSpecific(is,IS_CLASSID,3);
854   PetscValidPointer(newis,4);
855 
856   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
857   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
858   if (type == IS_GTOLM_MASK) {
859     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
860   } else {
861     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
862     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
863   }
864   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
865   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
866   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
867   PetscFunctionReturn(0);
868 }
869 
870 /*@
871     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
872     specified with a block global numbering.
873 
874     Not collective
875 
876     Input Parameters:
877 +   mapping - mapping between local and global numbering
878 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
879            IS_GTOLM_DROP - drops the indices with no local value from the output list
880 .   n - number of global indices to map
881 -   idx - global indices to map
882 
883     Output Parameters:
884 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
885 -   idxout - local index of each global index, one must pass in an array long enough
886              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
887              idxout == NULL to determine the required length (returned in nout)
888              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
889              a second time to set the values.
890 
891     Notes:
892     Either nout or idxout may be NULL. idx and idxout may be identical.
893 
894     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
895     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
896     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
897 
898     Level: advanced
899 
900     Developer Note: The manual page states that idx and idxout may be identical but the calling
901        sequence declares idx as const so it cannot be the same as idxout.
902 
903 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
904           ISLocalToGlobalMappingDestroy()
905 @*/
906 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
907                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
908 {
909   PetscErrorCode ierr;
910 
911   PetscFunctionBegin;
912   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
913   if (!mapping->data) {
914     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
915   }
916   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
917   PetscFunctionReturn(0);
918 }
919 
920 
921 /*@C
922     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
923      each index shared by more than one processor
924 
925     Collective on ISLocalToGlobalMapping
926 
927     Input Parameters:
928 .   mapping - the mapping from local to global indexing
929 
930     Output Parameter:
931 +   nproc - number of processors that are connected to this one
932 .   proc - neighboring processors
933 .   numproc - number of indices for each subdomain (processor)
934 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
935 
936     Level: advanced
937 
938     Fortran Usage:
939 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
940 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
941           PetscInt indices[nproc][numprocmax],ierr)
942         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
943         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
944 
945 
946 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
947           ISLocalToGlobalMappingRestoreInfo()
948 @*/
949 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
950 {
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
955   if (mapping->info_cached) {
956     *nproc    = mapping->info_nproc;
957     *procs    = mapping->info_procs;
958     *numprocs = mapping->info_numprocs;
959     *indices  = mapping->info_indices;
960   } else {
961     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
962   }
963   PetscFunctionReturn(0);
964 }
965 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - computes, by a three-round MPI
   rendezvous, the list of neighboring ranks and the (local) indices shared
   with each of them:

     (1) the global index range [0,Ng) is split evenly among the ranks
         (scale = Ng/size + 1) so each global index has a temporary owner;
     (2) every rank sends its (global index, local position) pairs to the
         temporary owners (tag1);
     (3) each owner counts how many ranks contributed each of its indices and
         sends back, to every contributor, the list of sharing ranks for each
         index with more than one sharer (message lengths on tag2, data on tag3);
     (4) the received lists are inverted into per-neighbor arrays of local
         indices, sorted by global number, with this rank's own entry swapped
         to position 0.

   The result is cached on the mapping (info_nproc/info_procs/info_numprocs/
   info_indices, info_cached) so subsequent calls are answered from the cache.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; return empty (but allocated) arrays so the
       Restore/Destroy paths can treat all cases uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + largest global index in use, over all ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* this rank temporarily "globally owns" the contiguous range [rstart,rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  /* each packed entry is a (global index, local position) pair */
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop advanced starts[]; rebuild the message offsets */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[r] = position of rank r in the compressed neighbor list (*procs) */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1373 
1374 /*@C
1375     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1376 
1377     Collective on ISLocalToGlobalMapping
1378 
1379     Input Parameters:
1380 .   mapping - the mapping from local to global indexing
1381 
1382     Output Parameter:
1383 +   nproc - number of processors that are connected to this one
1384 .   proc - neighboring processors
1385 .   numproc - number of indices for each processor
1386 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1387 
1388     Level: advanced
1389 
1390 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1391           ISLocalToGlobalMappingGetInfo()
1392 @*/
1393 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1394 {
1395   PetscErrorCode ierr;
1396 
1397   PetscFunctionBegin;
1398   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1399   if (mapping->info_free) {
1400     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1401     if (*indices) {
1402       PetscInt i;
1403 
1404       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1405       for (i=1; i<*nproc; i++) {
1406         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1407       }
1408       ierr = PetscFree(*indices);CHKERRQ(ierr);
1409     }
1410   }
1411   *nproc    = 0;
1412   *procs    = NULL;
1413   *numprocs = NULL;
1414   *indices  = NULL;
1415   PetscFunctionReturn(0);
1416 }
1417 
1418 /*@C
1419     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1420      each index shared by more than one processor
1421 
1422     Collective on ISLocalToGlobalMapping
1423 
1424     Input Parameters:
1425 .   mapping - the mapping from local to global indexing
1426 
1427     Output Parameter:
1428 +   nproc - number of processors that are connected to this one
1429 .   proc - neighboring processors
1430 .   numproc - number of indices for each subdomain (processor)
1431 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1432 
1433     Level: advanced
1434 
1435     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1436 
1437     Fortran Usage:
1438 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1439 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1440           PetscInt indices[nproc][numprocmax],ierr)
1441         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1442         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1443 
1444 
1445 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1446           ISLocalToGlobalMappingRestoreInfo()
1447 @*/
1448 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1449 {
1450   PetscErrorCode ierr;
1451   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1452 
1453   PetscFunctionBegin;
1454   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1455   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1456   if (bs > 1) { /* we need to expand the cached info */
1457     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1458     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1459     for (i=0; i<*nproc; i++) {
1460       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1461       for (j=0; j<bnumprocs[i]; j++) {
1462         for (k=0; k<bs; k++) {
1463           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1464         }
1465       }
1466       (*numprocs)[i] = bnumprocs[i]*bs;
1467     }
1468     mapping->info_free = PETSC_TRUE;
1469   } else {
1470     *numprocs = bnumprocs;
1471     *indices  = bindices;
1472   }
1473   PetscFunctionReturn(0);
1474 }
1475 
1476 /*@C
1477     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1478 
1479     Collective on ISLocalToGlobalMapping
1480 
1481     Input Parameters:
1482 .   mapping - the mapping from local to global indexing
1483 
1484     Output Parameter:
1485 +   nproc - number of processors that are connected to this one
1486 .   proc - neighboring processors
1487 .   numproc - number of indices for each processor
1488 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1489 
1490     Level: advanced
1491 
1492 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1493           ISLocalToGlobalMappingGetInfo()
1494 @*/
1495 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1496 {
1497   PetscErrorCode ierr;
1498 
1499   PetscFunctionBegin;
1500   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1501   PetscFunctionReturn(0);
1502 }
1503 
1504 /*@C
1505     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1506 
1507     Collective on ISLocalToGlobalMapping
1508 
1509     Input Parameters:
1510 .   mapping - the mapping from local to global indexing
1511 
1512     Output Parameter:
1513 +   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
1514 .   count - number of neighboring processors per node
1515 -   indices - indices of processes sharing the node (sorted)
1516 
1517     Level: advanced
1518 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1520 
1521 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1522           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1523 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  /* build and cache the per-node neighbor lists on the first call; later calls
     simply return the cached info_nodec/info_nodei arrays */
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    ierr = PetscCalloc1(n+1,&mapping->info_nodec);CHKERRQ(ierr); /* always allocate to flag setup */
    ierr = PetscMalloc1(n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count entries per node -- one for this rank itself plus one
       for every neighbor (entries i >= 1) sharing the node; m = total count */
    for (i=0,m=0;i<n;i++) { mapping->info_nodec[i] = 1; m++; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one contiguous allocation for all lists; info_nodei[i] point into it (CSR-like) */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* second pass: fill the lists; neigh[0] is this rank since GetInfo places
       the calling rank first. NOTE(review): this assumes n_neigh >= 1 whenever
       n > 0 -- confirm for the serial case where GetBlockInfo reports nproc = 0 */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (adjusts info_nodec[i]) */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1567 
1568 /*@C
1569     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1570 
1571     Collective on ISLocalToGlobalMapping
1572 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)
1580 
1581     Level: advanced
1582 
1583 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1584           ISLocalToGlobalMappingGetInfo()
1585 @*/
1586 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1587 {
1588   PetscFunctionBegin;
1589   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1590   if (nnodes)  *nnodes  = 0;
1591   if (count)   *count   = NULL;
1592   if (indices) *indices = NULL;
1593   PetscFunctionReturn(0);
1594 }
1595 
1596 /*@C
1597    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1598 
1599    Not Collective
1600 
1601    Input Arguments:
1602 . ltog - local to global mapping
1603 
1604    Output Arguments:
1605 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1606 
1607    Level: advanced
1608 
1609    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1611 
1612 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1613 @*/
1614 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1615 {
1616   PetscFunctionBegin;
1617   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1618   PetscValidPointer(array,2);
1619   if (ltog->bs == 1) {
1620     *array = ltog->indices;
1621   } else {
1622     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1623     const PetscInt *ii;
1624     PetscErrorCode ierr;
1625 
1626     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1627     *array = jj;
1628     k    = 0;
1629     ii   = ltog->indices;
1630     for (i=0; i<n; i++)
1631       for (j=0; j<bs; j++)
1632         jj[k++] = bs*ii[i] + j;
1633   }
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 /*@C
1638    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1639 
1640    Not Collective
1641 
1642    Input Arguments:
1643 + ltog - local to global mapping
1644 - array - array of indices
1645 
1646    Level: advanced
1647 
1648 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1649 @*/
1650 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1651 {
1652   PetscFunctionBegin;
1653   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1654   PetscValidPointer(array,2);
1655   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1656 
1657   if (ltog->bs > 1) {
1658     PetscErrorCode ierr;
1659     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1660   }
1661   PetscFunctionReturn(0);
1662 }
1663 
1664 /*@C
1665    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1666 
1667    Not Collective
1668 
1669    Input Arguments:
1670 . ltog - local to global mapping
1671 
1672    Output Arguments:
1673 . array - array of indices
1674 
1675    Level: advanced
1676 
1677 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1678 @*/
1679 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1680 {
1681   PetscFunctionBegin;
1682   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1683   PetscValidPointer(array,2);
1684   *array = ltog->indices;
1685   PetscFunctionReturn(0);
1686 }
1687 
1688 /*@C
1689    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1690 
1691    Not Collective
1692 
1693    Input Arguments:
1694 + ltog - local to global mapping
1695 - array - array of indices
1696 
1697    Level: advanced
1698 
1699 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1700 @*/
1701 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1702 {
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1705   PetscValidPointer(array,2);
1706   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1707   *array = NULL;
1708   PetscFunctionReturn(0);
1709 }
1710 
1711 /*@C
1712    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1713 
1714    Not Collective
1715 
1716    Input Arguments:
1717 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1718 . n - number of mappings to concatenate
1719 - ltogs - local to global mappings
1720 
1721    Output Arguments:
1722 . ltogcat - new mapping
1723 
1724    Note: this currently always returns a mapping with block size of 1
1725 
1726    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1727 
1728    Level: advanced
1729 
1730 .seealso: ISLocalToGlobalMappingCreate()
1731 @*/
1732 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1733 {
1734   PetscInt       i,cnt,m,*idx;
1735   PetscErrorCode ierr;
1736 
1737   PetscFunctionBegin;
1738   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1739   if (n > 0) PetscValidPointer(ltogs,3);
1740   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1741   PetscValidPointer(ltogcat,4);
1742   for (cnt=0,i=0; i<n; i++) {
1743     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1744     cnt += m;
1745   }
1746   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1747   for (cnt=0,i=0; i<n; i++) {
1748     const PetscInt *subidx;
1749     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1750     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1751     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1752     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1753     cnt += m;
1754   }
1755   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1756   PetscFunctionReturn(0);
1757 }
1758 
1759 /*MC
1760       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1761                                     used this is good for only small and moderate size problems.
1762 
   Options Database Key:
.   -islocaltoglobalmapping_type basic - select this method
1765 
1766    Level: beginner
1767 
1768 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1769 M*/
1770 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1771 {
1772   PetscFunctionBegin;
1773   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1774   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1775   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1776   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1777   PetscFunctionReturn(0);
1778 }
1779 
1780 /*MC
1781       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1782                                     used this is good for large memory problems.
1783 
   Options Database Key:
.   -islocaltoglobalmapping_type hash - select this method
1786 
1787 
1788    Notes:
1789     This is selected automatically for large problems if the user does not set the type.
1790 
1791    Level: beginner
1792 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1794 M*/
1795 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1796 {
1797   PetscFunctionBegin;
1798   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1799   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1800   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1801   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1802   PetscFunctionReturn(0);
1803 }
1804 
1805 
1806 /*@C
1807     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1808 
1809    Not Collective
1810 
1811    Input Parameters:
1812 +  sname - name of a new method
1813 -  routine_create - routine to create method context
1814 
1815    Notes:
1816    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1817 
1818    Sample usage:
1819 .vb
1820    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1821 .ve
1822 
1823    Then, your mapping can be chosen with the procedural interface via
1824 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1825    or at runtime via the option
1826 $     -islocaltoglobalmapping_type my_mapper
1827 
1828    Level: advanced
1829 
1830 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1831 
1832 @*/
1833 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1834 {
1835   PetscErrorCode ierr;
1836 
1837   PetscFunctionBegin;
1838   ierr = ISInitializePackage();CHKERRQ(ierr);
1839   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1840   PetscFunctionReturn(0);
1841 }
1842 
1843 /*@C
1844    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1845 
1846    Logically Collective on ISLocalToGlobalMapping
1847 
1848    Input Parameters:
1849 +  ltog - the ISLocalToGlobalMapping object
1850 -  type - a known method
1851 
1852    Options Database Key:
1853 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1854     of available methods (for instance, basic or hash)
1855 
1856    Notes:
1857    See "petsc/include/petscis.h" for available methods
1858 
1859   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1860   then set the ISLocalToGlobalMapping type from the options database rather than by using
1861   this routine.
1862 
1863   Level: intermediate
1864 
1865   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1866   are accessed by ISLocalToGlobalMappingSetType().
1867 
1868 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1869 
1870 @*/
1871 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1872 {
1873   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1874   PetscBool      match;
1875 
1876   PetscFunctionBegin;
1877   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1878   PetscValidCharPointer(type,2);
1879 
1880   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1881   if (match) PetscFunctionReturn(0);
1882 
1883   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1884   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1885   /* Destroy the previous private LTOG context */
1886   if (ltog->ops->destroy) {
1887     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1888     ltog->ops->destroy = NULL;
1889   }
1890   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1891   ierr = (*r)(ltog);CHKERRQ(ierr);
1892   PetscFunctionReturn(0);
1893 }
1894 
1895 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1896 
1897 /*@C
1898   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1899 
1900   Not Collective
1901 
1902   Level: advanced
1903 
.seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
1905 @*/
1906 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1907 {
1908   PetscErrorCode ierr;
1909 
1910   PetscFunctionBegin;
1911   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1912   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1913 
1914   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
1915   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
1916   PetscFunctionReturn(0);
1917 }
1918 
1919