xref: /petsc/src/vec/is/utils/isltog.c (revision 9a535baf3620b7b1d5229355c8a20d219c1a92fe)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
/* Class id identifying ISLocalToGlobalMapping objects in the PETSc object system */
PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

/* Type-specific data for ISLOCALTOGLOBALMAPPINGBASIC: a dense lookup table,
   globals[g - globalstart] = local index of global index g (or -1 if none) */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Type-specific data for ISLOCALTOGLOBALMAPPINGHASH: a hash map from
   global index to local index, memory proportional to n rather than to the global range */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
{
  PetscInt       numCells, step = 1;
  PetscBool      isStride;
  PetscErrorCode ierr;

  PetscFunctionBeginHot;
  *pStart = 0;
  *points = NULL;
  ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
  /* for a stride IS, *pStart becomes the first stride value */
  if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
  *pEnd   = *pStart + numCells;
  /* only a unit-stride IS can be traversed without an explicit index array;
     NOTE(review): for a stride with step != 1, *pStart is the stride start while the
     index array returned by ISGetIndices() is 0-based -- confirm callers expect this */
  if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
59 
60 /*@C
61   ISRestorePointRange - Destroys the traversal description
62 
63   Not collective
64 
65   Input Parameters:
66 + pointIS - The IS object
67 . pStart  - The first index, from ISGetPointRange()
68 . pEnd    - One past the last index, from ISGetPointRange()
69 - points  - The indices, from ISGetPointRange()
70 
71   Notes:
72   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
73 $ ISGetPointRange(is, &pStart, &pEnd, &points);
74 $ for (p = pStart; p < pEnd; ++p) {
75 $   const PetscInt point = points ? points[p] : p;
76 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
78 
79   Level: intermediate
80 
81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
82 @*/
83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
84 {
85   PetscInt       step = 1;
86   PetscBool      isStride;
87   PetscErrorCode ierr;
88 
89   PetscFunctionBeginHot;
90   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
91   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
92   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
93   PetscFunctionReturn(0);
94 }
95 
96 /*@C
97   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
98 
99   Not collective
100 
101   Input Parameters:
102 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
104 . pEnd       - One past the last index for the subrange
105 - points     - The indices for the entire range, from ISGetPointRange()
106 
107   Output Parameters:
108 . subpointIS - The IS object now configured to be a subrange
109 
110   Notes:
111   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
112 
113   Level: intermediate
114 
115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116 @*/
117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118 {
119   PetscErrorCode ierr;
120 
121   PetscFunctionBeginHot;
122   if (points) {
123     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
124     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
125   } else {
126     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
127     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 /* -----------------------------------------------------------------------------------------*/
133 
134 /*
135     Creates the global mapping information in the ISLocalToGlobalMapping structure
136 
137     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138 */
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0); /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the smallest and largest global index; negative entries mean "no global value" and are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: normalize to the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;
  if (!((PetscObject)mapping)->type_name) {
    /* heuristic: if a dense table over [start,end] would be much larger than the
       index data itself, use the memory-scalable hash implementation */
    if ((end - start) > PetscMax(4*n,1000000)) {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
    } else {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
    }
  }
  /* dispatch to the type-specific setup (Basic or Hash) */
  ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
167 
/* Builds the dense global->local lookup table for the BASIC implementation */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  ierr             = PetscNew(&map);CHKERRQ(ierr);
  /* +2 (not +1) presumably keeps the allocation nonempty for the empty range end == start-1 */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  map->globals     = globals;
  /* -1 marks global indices in the range that have no local value */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue; /* negative entries carry no global index */
    globals[idx[i] - start] = i;
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
189 
190 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191 {
192   PetscErrorCode              ierr;
193   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194   ISLocalToGlobalMapping_Hash *map;
195 
196   PetscFunctionBegin;
197   ierr = PetscNew(&map);CHKERRQ(ierr);
198   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
199   for (i=0; i<n; i++) {
200     if (idx[i] < 0) continue;
201     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
202   }
203   mapping->data = (void*)map;
204   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
205   PetscFunctionReturn(0);
206 }
207 
208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209 {
210   PetscErrorCode              ierr;
211   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
212 
213   PetscFunctionBegin;
214   if (!map) PetscFunctionReturn(0);
215   ierr = PetscFree(map->globals);CHKERRQ(ierr);
216   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221 {
222   PetscErrorCode              ierr;
223   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
224 
225   PetscFunctionBegin;
226   if (!map) PetscFunctionReturn(0);
227   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
228   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
/*
   The template header isltog.h is included once per (GTOLTYPE, GTOLNAME, GTOLBS, GTOL)
   configuration below; each inclusion instantiates the global-to-local apply routine
   for one implementation (Basic/Hash) and one blocking variant (per-entry/per-block).
   The GTOL macro performs a single global->local translation into `local`.
*/

/* Basic, per-entry: dense table lookup on the block index, then offset within the block */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Basic, per-block: dense table lookup with block size 1 */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, per-entry: hash lookup on the block index, then offset within the block */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, per-block: hash lookup with block size 1 */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
266 
267 /*@
268     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
269 
270     Not Collective
271 
272     Input Parameter:
273 .   ltog - local to global mapping
274 
275     Output Parameter:
276 .   nltog - the duplicated local to global mapping
277 
278     Level: advanced
279 
280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281 @*/
282 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283 {
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
288   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
289   PetscFunctionReturn(0);
290 }
291 
292 /*@
293     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
294 
295     Not Collective
296 
297     Input Parameter:
298 .   ltog - local to global mapping
299 
300     Output Parameter:
301 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
302 
303     Level: advanced
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
306 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* the mapping stores n blocks of size bs; the local size counts individual entries */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
315 
316 /*@C
317    ISLocalToGlobalMappingViewFromOptions - View from Options
318 
319    Collective on ISLocalToGlobalMapping
320 
321    Input Parameters:
322 +  A - the local to global mapping object
323 .  obj - Optional object
324 -  name - command line option
325 
326    Level: intermediate
327 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
328 @*/
PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
  /* thin wrapper: defers option lookup and viewing to the generic PetscObject machinery */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
338 
339 /*@C
340     ISLocalToGlobalMappingView - View a local to global mapping
341 
342     Not Collective
343 
344     Input Parameters:
345 +   ltog - local to global mapping
346 -   viewer - viewer
347 
348     Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
351 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to the standard ASCII viewer on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  /* only ASCII viewers are supported; other viewer types are silently ignored */
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so ranks emit their "[rank] local global" lines in order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
379 
380 /*@
381     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
382     ordering and a global parallel ordering.
383 
384     Not collective
385 
386     Input Parameter:
387 .   is - index set containing the global numbers for each local number
388 
389     Output Parameter:
390 .   mapping - new mapping data structure
391 
392     Notes:
393     the block size of the IS determines the block size of the mapping
394     Level: advanced
395 
396 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
397 @*/
398 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
399 {
400   PetscErrorCode ierr;
401   PetscInt       n,bs;
402   const PetscInt *indices;
403   MPI_Comm       comm;
404   PetscBool      isblock;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(is,IS_CLASSID,1);
408   PetscValidPointer(mapping,2);
409 
410   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
411   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
412   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
413   if (!isblock) {
414     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
415     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
416     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
417   } else {
418     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
419     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
420     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
421     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
422   }
423   PetscFunctionReturn(0);
424 }
425 
426 /*@C
427     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
428     ordering and a global parallel ordering.
429 
430     Collective
431 
432     Input Parameters:
433 +   sf - star forest mapping contiguous local indices to (rank, offset)
434 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
435 
436     Output Parameter:
437 .   mapping - new mapping data structure
438 
439     Level: advanced
440 
441     Notes:
442     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
443 
444 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
445 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (start == PETSC_DECIDE) {
    /* contiguous numbering: this rank's first global index is the sum of root counts on lower ranks */
    start = 0;
    ierr = MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  } else if (start < 0) SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  if (ilocal) {
    /* explicit leaf locations: the local space must cover the largest leaf index */
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves; /* NULL ilocal means leaves occupy 0..nleaves-1 contiguously */
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i; /* global number of each owned root */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;         /* local slots not covered by a leaf stay unmapped */
  /* pull each root's global number to every leaf that references it */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  /* the mapping takes ownership of ltog; globals is scratch and freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
477 
478 /*@
479     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
480 
481     Not collective
482 
483     Input Parameters:
484 +   mapping - mapping data structure
485 -   bs - the blocksize
486 
487     Level: advanced
488 
489 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
490 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;      /* old number of blocks */
  obs = mapping->bs;     /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;     /* new number of blocks; total entry count on*obs must split evenly */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* expanded (per-entry) view of the old indices, so each new block can be validated */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* every new block must consist of bs consecutive global indices, or be entirely negative;
       cn counts the negative entries seen in block i */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++; /* account for the last entry of the block */
    if (cn) {
      /* a partially-negative block cannot be represented by a single block index */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs; /* new block index of the block's first global entry */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* discard type-specific global-to-local data; it is rebuilt lazily for the new block size */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
552 
553 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
556 
557     Not Collective
558 
559     Input Parameters:
560 .   mapping - mapping data structure
561 
562     Output Parameter:
563 .   bs - the blocksize
564 
565     Level: advanced
566 
567 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
568 @*/
569 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
570 {
571   PetscFunctionBegin;
572   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
573   *bs = mapping->bs;
574   PetscFunctionReturn(0);
575 }
576 
577 /*@
578     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
579     ordering and a global parallel ordering.
580 
581     Not Collective, but communicator may have more than one process
582 
583     Input Parameters:
584 +   comm - MPI communicator
585 .   bs - the block size
586 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
587 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
588 -   mode - see PetscCopyMode
589 
590     Output Parameter:
591 .   mapping - new mapping data structure
592 
593     Notes:
594     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
595 
596     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
597     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
598     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
599 
600     Level: advanced
601 
602 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
603           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
604 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,4); /* indices may be NULL only for an empty mapping */
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  /* cached neighbor/overlap info starts empty; presumably filled lazily by the *GetInfo routines */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  (*mapping)->info_nodec    = NULL;
  (*mapping)->info_nodei    = NULL;

  /* type-specific operations are installed later by ISLocalToGlobalMappingSetType() */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; it is freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
644 
645 PetscFunctionList ISLocalToGlobalMappingList = NULL;
646 
647 /*@
648    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
649 
650    Not collective
651 
652    Input Parameters:
653 .  mapping - mapping data structure
654 
655    Level: advanced
656 
657 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* offer the current type as the default shown in -help; "Not set" is a placeholder when no type has been chosen yet */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
676 
677 /*@
678    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
679    ordering and a global parallel ordering.
680 
   Not Collective
682 
683    Input Parameters:
684 .  mapping - mapping data structure
685 
686    Level: advanced
687 
688 .seealso: ISLocalToGlobalMappingCreate()
689 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the holder of the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* each row of info_indices is a separate allocation, freed individually */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* info_nodei[0] owns the flat data array; the row-pointer arrays go with PetscFree2 */
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* free type-specific (Basic/Hash) global-to-local data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = NULL;
  PetscFunctionReturn(0);
}
721 
722 /*@
723     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
724     a new index set using the global numbering defined in an ISLocalToGlobalMapping
725     context.
726 
727     Collective on is
728 
729     Input Parameters:
730 +   mapping - mapping between local and global numbering
731 -   is - index set in local numbering
732 
733     Output Parameters:
734 .   newis - index set in global numbering
735 
736     Notes:
737     The output IS will have the same communicator of the input IS.
738 
739     Level: advanced
740 
741 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
742           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
743 @*/
744 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
745 {
746   PetscErrorCode ierr;
747   PetscInt       n,*idxout;
748   const PetscInt *idxin;
749 
750   PetscFunctionBegin;
751   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
752   PetscValidHeaderSpecific(is,IS_CLASSID,2);
753   PetscValidPointer(newis,3);
754 
755   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
756   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
757   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
758   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
759   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
760   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
761   PetscFunctionReturn(0);
762 }
763 
764 /*@
765    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
766    and converts them to the global numbering.
767 
768    Not collective
769 
770    Input Parameters:
771 +  mapping - the local to global mapping context
772 .  N - number of integers
773 -  in - input indices in local numbering
774 
775    Output Parameter:
776 .  out - indices in global numbering
777 
778    Notes:
779    The in and out array parameters may be identical.
780 
781    Level: advanced
782 
783 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
784           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
785           AOPetscToApplication(), ISGlobalToLocalMappingApply()
786 
787 @*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs   = mapping->bs;
  Nmax = bs*mapping->n; /* number of valid local (per-entry) indices */
  if (bs == 1) {
    /* fast path: one stored global index per local index */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        /* negative indices pass through unchanged (used to mark "no value") */
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    /* blocked: indices stores one entry per block of bs consecutive values */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      /* global index = block's stored index scaled by bs, plus the offset within the block */
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}
819 
820 /*@
821    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
822 
823    Not collective
824 
825    Input Parameters:
826 +  mapping - the local to global mapping context
827 .  N - number of integers
828 -  in - input indices in local block numbering
829 
830    Output Parameter:
831 .  out - indices in global block numbering
832 
833    Notes:
834    The in and out array parameters may be identical.
835 
836    Example:
837      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
838      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
839 
840    Level: advanced
841 
842 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
843           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
844           AOPetscToApplication(), ISGlobalToLocalMappingApply()
845 
846 @*/
847 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
848 {
849 
850   PetscFunctionBegin;
851   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
852   {
853     PetscInt i,Nmax = mapping->n;
854     const PetscInt *idx = mapping->indices;
855 
856     for (i=0; i<N; i++) {
857       if (in[i] < 0) {
858         out[i] = in[i];
859         continue;
860       }
861       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
862       out[i] = idx[in[i]];
863     }
864   }
865   PetscFunctionReturn(0);
866 }
867 
868 /*@
869     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
870     specified with a global numbering.
871 
872     Not collective
873 
874     Input Parameters:
875 +   mapping - mapping between local and global numbering
876 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
877            IS_GTOLM_DROP - drops the indices with no local value from the output list
878 .   n - number of global indices to map
879 -   idx - global indices to map
880 
881     Output Parameters:
882 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
883 -   idxout - local index of each global index, one must pass in an array long enough
884              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
885              idxout == NULL to determine the required length (returned in nout)
886              and then allocate the required space and call ISGlobalToLocalMappingApply()
887              a second time to set the values.
888 
889     Notes:
890     Either nout or idxout may be NULL. idx and idxout may be identical.
891 
892     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
893     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
894     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
895 
896     Level: advanced
897 
898     Developer Note: The manual page states that idx and idxout may be identical but the calling
899        sequence declares idx as const so it cannot be the same as idxout.
900 
901 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
902           ISLocalToGlobalMappingDestroy()
903 @*/
904 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
905 {
906   PetscErrorCode ierr;
907 
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
910   if (!mapping->data) {
911     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
912   }
913   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
914   PetscFunctionReturn(0);
915 }
916 
917 /*@
918     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
919     a new index set using the local numbering defined in an ISLocalToGlobalMapping
920     context.
921 
922     Not collective
923 
924     Input Parameters:
925 +   mapping - mapping between local and global numbering
926 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
927            IS_GTOLM_DROP - drops the indices with no local value from the output list
928 -   is - index set in global numbering
929 
930     Output Parameters:
931 .   newis - index set in local numbering
932 
933     Notes:
934     The output IS will be sequential, as it encodes a purely local operation
935 
936     Level: advanced
937 
938 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
939           ISLocalToGlobalMappingDestroy()
940 @*/
941 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
942 {
943   PetscErrorCode ierr;
944   PetscInt       n,nout,*idxout;
945   const PetscInt *idxin;
946 
947   PetscFunctionBegin;
948   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
949   PetscValidHeaderSpecific(is,IS_CLASSID,3);
950   PetscValidPointer(newis,4);
951 
952   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
953   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
954   if (type == IS_GTOLM_MASK) {
955     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
956   } else {
957     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
958     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
959   }
960   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
961   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
962   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
963   PetscFunctionReturn(0);
964 }
965 
966 /*@
967     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
968     specified with a block global numbering.
969 
970     Not collective
971 
972     Input Parameters:
973 +   mapping - mapping between local and global numbering
974 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
975            IS_GTOLM_DROP - drops the indices with no local value from the output list
976 .   n - number of global indices to map
977 -   idx - global indices to map
978 
979     Output Parameters:
980 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
981 -   idxout - local index of each global index, one must pass in an array long enough
982              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
983              idxout == NULL to determine the required length (returned in nout)
984              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
985              a second time to set the values.
986 
987     Notes:
988     Either nout or idxout may be NULL. idx and idxout may be identical.
989 
990     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
991     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
992     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
993 
994     Level: advanced
995 
996     Developer Note: The manual page states that idx and idxout may be identical but the calling
997        sequence declares idx as const so it cannot be the same as idxout.
998 
999 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
1000           ISLocalToGlobalMappingDestroy()
1001 @*/
1002 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
1003                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
1004 {
1005   PetscErrorCode ierr;
1006 
1007   PetscFunctionBegin;
1008   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1009   if (!mapping->data) {
1010     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
1011   }
1012   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
1013   PetscFunctionReturn(0);
1014 }
1015 
1016 /*@C
1017     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1018      each index shared by more than one processor
1019 
1020     Collective on ISLocalToGlobalMapping
1021 
1022     Input Parameter:
1023 .   mapping - the mapping from local to global indexing
1024 
1025     Output Parameters:
1026 +   nproc - number of processors that are connected to this one
1027 .   proc - neighboring processors
1028 .   numproc - number of indices for each subdomain (processor)
1029 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1030 
1031     Level: advanced
1032 
1033     Fortran Usage:
1034 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1035 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1036           PetscInt indices[nproc][numprocmax],ierr)
1037         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1038         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1039 
1040 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1041           ISLocalToGlobalMappingRestoreInfo()
1042 @*/
1043 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1044 {
1045   PetscErrorCode ierr;
1046 
1047   PetscFunctionBegin;
1048   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1049   if (mapping->info_cached) {
1050     *nproc    = mapping->info_nproc;
1051     *procs    = mapping->info_procs;
1052     *numprocs = mapping->info_numprocs;
1053     *indices  = mapping->info_indices;
1054   } else {
1055     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1056   }
1057   PetscFunctionReturn(0);
1058 }
1059 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - computes, for each neighboring rank,
   which local (block) indices this rank shares with it, using a three-phase MPI
   rendezvous: (1) each rank sends its local indices to the "global owner" rank
   determined by an even partition of the global index range, (2) owners count
   how many ranks touch each index and send back, for each shared index, the full
   list of sharing ranks, (3) the results are regrouped per neighboring rank and
   sorted by global number.  The result is cached on the mapping for reuse.

   Output arrays follow the same contract as ISLocalToGlobalMappingGetBlockInfo();
   on return the entry for this rank itself (if present) is moved to position 0.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* uniprocessor: nothing is shared; still allocate one-entry arrays so the
       Restore/free logic (which frees entry 0 unconditionally) works */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = global number of indices = 1 + max global index over all ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRMPI(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* even partition of [0,Ng) into blocks of 'scale'; [rstart,rstart+ng) is owned here */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners;
     each entry is the pair (global index, local position) */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild it for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr); /* bprocs[rank] = compressed position of that rank in *procs */
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1467 
1468 /*@C
1469     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1470 
1471     Collective on ISLocalToGlobalMapping
1472 
1473     Input Parameter:
1474 .   mapping - the mapping from local to global indexing
1475 
1476     Output Parameters:
1477 +   nproc - number of processors that are connected to this one
1478 .   proc - neighboring processors
1479 .   numproc - number of indices for each processor
1480 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1481 
1482     Level: advanced
1483 
1484 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1485           ISLocalToGlobalMappingGetInfo()
1486 @*/
1487 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1488 {
1489   PetscErrorCode ierr;
1490 
1491   PetscFunctionBegin;
1492   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1493   if (mapping->info_free) {
1494     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1495     if (*indices) {
1496       PetscInt i;
1497 
1498       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1499       for (i=1; i<*nproc; i++) {
1500         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1501       }
1502       ierr = PetscFree(*indices);CHKERRQ(ierr);
1503     }
1504   }
1505   *nproc    = 0;
1506   *procs    = NULL;
1507   *numprocs = NULL;
1508   *indices  = NULL;
1509   PetscFunctionReturn(0);
1510 }
1511 
1512 /*@C
1513     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1514      each index shared by more than one processor
1515 
1516     Collective on ISLocalToGlobalMapping
1517 
1518     Input Parameter:
1519 .   mapping - the mapping from local to global indexing
1520 
1521     Output Parameters:
1522 +   nproc - number of processors that are connected to this one
1523 .   proc - neighboring processors
1524 .   numproc - number of indices for each subdomain (processor)
1525 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1526 
1527     Level: advanced
1528 
1529     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1530 
1531     Fortran Usage:
1532 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1533 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1534           PetscInt indices[nproc][numprocmax],ierr)
1535         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1536         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1537 
1538 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1539           ISLocalToGlobalMappingRestoreInfo()
1540 @*/
PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Obtain the (cached) block-based neighbor info; for bs == 1 it is returned as-is below */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      /* each shared block index bindices[i][j] expands to bs consecutive point indices
         bs*bindices[i][j] .. bs*bindices[i][j]+bs-1 */
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* record that the returned numprocs/indices arrays were allocated here (not the cached
       block arrays) so that ISLocalToGlobalMappingRestoreInfo() knows to free them */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: hand back the cached block info directly; caller must not free it itself */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}
1568 
1569 /*@C
1570     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1571 
1572     Collective on ISLocalToGlobalMapping
1573 
1574     Input Parameter:
1575 .   mapping - the mapping from local to global indexing
1576 
1577     Output Parameters:
1578 +   nproc - number of processors that are connected to this one
1579 .   proc - neighboring processors
1580 .   numproc - number of indices for each processor
1581 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1582 
1583     Level: advanced
1584 
1585 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1586           ISLocalToGlobalMappingGetInfo()
1587 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Delegate to the block-info restore; NOTE(review): this is assumed to honor
     mapping->info_free (set by ISLocalToGlobalMappingGetInfo() when bs > 1) and free
     the expanded arrays in that case -- confirm in ISLocalToGlobalMappingRestoreBlockInfo() */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1596 
1597 /*@C
1598     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1599 
1600     Collective on ISLocalToGlobalMapping
1601 
1602     Input Parameter:
1603 .   mapping - the mapping from local to global indexing
1604 
1605     Output Parameters:
1606 +   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1607 .   count - number of neighboring processors per node
1608 -   indices - indices of processes sharing the node (sorted)
1609 
1610     Level: advanced
1611 
1612     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1613 
1614 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1615           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1616 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) { /* build and cache the per-node neighbor lists on first use */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec has n+1 entries; entry n is zeroed below
       NOTE(review): the extra entry appears unused past initialization -- confirm */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count the processes sharing each node; every node is counted once for
       neighbor 0 (presumably this process itself -- verify against GetBlockInfo), plus
       once per remote neighbor listing it */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;                             /* m accumulates the total length of all node lists */
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation of length m; info_nodei[i] are CSR-style pointers into it */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reuse info_nodec as a fill cursor while storing the neighbor ranks */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* neigh[0] is stored for every node; assumes n_neigh >= 1 whenever n > 0 -- TODO confirm */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (a rank can share a node via
       several block entries); this also fixes up the final counts */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1661 
1662 /*@C
1663     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1664 
1665     Collective on ISLocalToGlobalMapping
1666 
1667     Input Parameter:
1668 .   mapping - the mapping from local to global indexing
1669 
1670     Output Parameters:
1671 +   nnodes - number of local nodes
1672 .   count - number of neighboring processors per node
1673 -   indices - indices of processes sharing the node (sorted)
1674 
1675     Level: advanced
1676 
1677 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1678           ISLocalToGlobalMappingGetInfo()
1679 @*/
1680 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1681 {
1682   PetscFunctionBegin;
1683   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1684   if (nnodes)  *nnodes  = 0;
1685   if (count)   *count   = NULL;
1686   if (indices) *indices = NULL;
1687   PetscFunctionReturn(0);
1688 }
1689 
1690 /*@C
1691    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1692 
1693    Not Collective
1694 
1695    Input Parameter:
1696 . ltog - local to global mapping
1697 
1698    Output Parameter:
1699 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1700 
1701    Level: advanced
1702 
1703    Notes:
1704     ISLocalToGlobalMappingGetSize() returns the length of this array
1705 
1706 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1707 @*/
1708 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1709 {
1710   PetscFunctionBegin;
1711   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1712   PetscValidPointer(array,2);
1713   if (ltog->bs == 1) {
1714     *array = ltog->indices;
1715   } else {
1716     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1717     const PetscInt *ii;
1718     PetscErrorCode ierr;
1719 
1720     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1721     *array = jj;
1722     k    = 0;
1723     ii   = ltog->indices;
1724     for (i=0; i<n; i++)
1725       for (j=0; j<bs; j++)
1726         jj[k++] = bs*ii[i] + j;
1727   }
1728   PetscFunctionReturn(0);
1729 }
1730 
1731 /*@C
1732    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1733 
1734    Not Collective
1735 
1736    Input Parameters:
1737 + ltog - local to global mapping
1738 - array - array of indices
1739 
1740    Level: advanced
1741 
1742 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1743 @*/
1744 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1745 {
1746   PetscFunctionBegin;
1747   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1748   PetscValidPointer(array,2);
1749   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1750 
1751   if (ltog->bs > 1) {
1752     PetscErrorCode ierr;
1753     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1754   }
1755   PetscFunctionReturn(0);
1756 }
1757 
1758 /*@C
1759    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1760 
1761    Not Collective
1762 
1763    Input Parameter:
1764 . ltog - local to global mapping
1765 
1766    Output Parameter:
1767 . array - array of indices
1768 
1769    Level: advanced
1770 
1771 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1772 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* return the mapping's internal block-index array directly; no copy is made, so the
     caller must return the identical pointer via ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1781 
1782 /*@C
1783    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1784 
1785    Not Collective
1786 
1787    Input Parameters:
1788 + ltog - local to global mapping
1789 - array - array of indices
1790 
1791    Level: advanced
1792 
1793 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1794 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Get hands out the internal array, so the identical pointer must be returned here */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* nothing to free (no copy was made); just invalidate the caller's handle */
  *array = NULL;
  PetscFunctionReturn(0);
}
1804 
1805 /*@C
1806    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1807 
1808    Not Collective
1809 
1810    Input Parameters:
1811 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1812 . n - number of mappings to concatenate
1813 - ltogs - local to global mappings
1814 
1815    Output Parameter:
1816 . ltogcat - new mapping
1817 
1818    Note: this currently always returns a mapping with block size of 1
1819 
1820    Developer Note: If all the input mappings have the same block size we could easily handle that as a special case
1821 
1822    Level: advanced
1823 
1824 .seealso: ISLocalToGlobalMappingCreate()
1825 @*/
1826 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1827 {
1828   PetscInt       i,cnt,m,*idx;
1829   PetscErrorCode ierr;
1830 
1831   PetscFunctionBegin;
1832   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1833   if (n > 0) PetscValidPointer(ltogs,3);
1834   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1835   PetscValidPointer(ltogcat,4);
1836   for (cnt=0,i=0; i<n; i++) {
1837     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1838     cnt += m;
1839   }
1840   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1841   for (cnt=0,i=0; i<n; i++) {
1842     const PetscInt *subidx;
1843     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1844     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1845     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1846     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1847     cnt += m;
1848   }
1849   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1850   PetscFunctionReturn(0);
1851 }
1852 
1853 /*MC
1854       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1855                                     used this is good for only small and moderate size problems.
1856 
1857    Options Database Keys:
1858 .   -islocaltoglobalmapping_type basic - select this method
1859 
1860    Level: beginner
1861 
1862 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1863 M*/
1864 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1865 {
1866   PetscFunctionBegin;
1867   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1868   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1869   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1870   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1871   PetscFunctionReturn(0);
1872 }
1873 
1874 /*MC
1875       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1876                                     used this is good for large memory problems.
1877 
1878    Options Database Keys:
1879 .   -islocaltoglobalmapping_type hash - select this method
1880 
1881    Notes:
1882     This is selected automatically for large problems if the user does not set the type.
1883 
1884    Level: beginner
1885 
1886 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1887 M*/
1888 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1889 {
1890   PetscFunctionBegin;
1891   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1892   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1893   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1894   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1895   PetscFunctionReturn(0);
1896 }
1897 
1898 /*@C
1899     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1900 
1901    Not Collective
1902 
1903    Input Parameters:
1904 +  sname - name of a new method
1905 -  routine_create - routine to create method context
1906 
1907    Notes:
1908    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1909 
1910    Sample usage:
1911 .vb
1912    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1913 .ve
1914 
1915    Then, your mapping can be chosen with the procedural interface via
1916 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1917    or at runtime via the option
1918 $     -islocaltoglobalmapping_type my_mapper
1919 
1920    Level: advanced
1921 
1922 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1923 
1924 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the IS package (and hence the registry list) is initialized before adding */
  ierr = ISInitializePackage();CHKERRQ(ierr);
  /* record the creation routine under sname for later lookup by ISLocalToGlobalMappingSetType() */
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1934 
1935 /*@C
1936    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1937 
1938    Logically Collective on ISLocalToGlobalMapping
1939 
1940    Input Parameters:
1941 +  ltog - the ISLocalToGlobalMapping object
1942 -  type - a known method
1943 
1944    Options Database Key:
1945 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1946     of available methods (for instance, basic or hash)
1947 
1948    Notes:
1949    See "petsc/include/petscis.h" for available methods
1950 
1951   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1952   then set the ISLocalToGlobalMapping type from the options database rather than by using
1953   this routine.
1954 
1955   Level: intermediate
1956 
1957   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1958   are accessed by ISLocalToGlobalMappingSetType().
1959 
1960 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1961 
1962 @*/
1963 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1964 {
1965   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1966   PetscBool      match;
1967 
1968   PetscFunctionBegin;
1969   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1970   PetscValidCharPointer(type,2);
1971 
1972   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1973   if (match) PetscFunctionReturn(0);
1974 
1975   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1976   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1977   /* Destroy the previous private LTOG context */
1978   if (ltog->ops->destroy) {
1979     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1980     ltog->ops->destroy = NULL;
1981   }
1982   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1983   ierr = (*r)(ltog);CHKERRQ(ierr);
1984   PetscFunctionReturn(0);
1985 }
1986 
1987 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1988 
1989 /*@C
1990   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1991 
1992   Not Collective
1993 
1994   Level: advanced
1995 
1996 .seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1997 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* idempotent: register the built-in implementations exactly once per process */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2010