xref: /petsc/src/vec/is/utils/isltog.c (revision e8a52743c91e6b7db1476e414988ccd0447efb1f)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Data for the "basic" global-to-local strategy: a dense lookup table
   spanning the whole global index range [globalstart, globalend]
   (fast lookups, but memory scales with the range, not with n) */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Data for the "hash" global-to-local strategy: a hash map from global
   index to local index (scalable when the global range is large/sparse) */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47   PetscErrorCode ierr;
48 
49   PetscFunctionBeginHot;
50   *pStart = 0;
51   *points = NULL;
52   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
53   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
54   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
55   *pEnd   = *pStart + numCells;
56   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
57   PetscFunctionReturn(0);
58 }
59 
60 /*@C
61   ISRestorePointRange - Destroys the traversal description
62 
63   Not collective
64 
65   Input Parameters:
66 + pointIS - The IS object
67 . pStart  - The first index, from ISGetPointRange()
68 . pEnd    - One past the last index, from ISGetPointRange()
69 - points  - The indices, from ISGetPointRange()
70 
71   Notes:
72   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
73 $ ISGetPointRange(is, &pStart, &pEnd, &points);
74 $ for (p = pStart; p < pEnd; ++p) {
75 $   const PetscInt point = points ? points[p] : p;
76 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
78 
79   Level: intermediate
80 
81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
82 @*/
83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
84 {
85   PetscInt       step = 1;
86   PetscBool      isStride;
87   PetscErrorCode ierr;
88 
89   PetscFunctionBeginHot;
90   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
91   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
92   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
93   PetscFunctionReturn(0);
94 }
95 
96 /*@C
97   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
98 
99   Not collective
100 
101   Input Parameters:
102 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
104 . pEnd       - One past the last index for the subrange
105 - points     - The indices for the entire range, from ISGetPointRange()
106 
107   Output Parameters:
108 . subpointIS - The IS object now configured to be a subrange
109 
110   Notes:
111   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
112 
113   Level: intermediate
114 
115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116 @*/
117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118 {
119   PetscErrorCode ierr;
120 
121   PetscFunctionBeginHot;
122   if (points) {
123     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
124     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
125   } else {
126     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
127     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 /* -----------------------------------------------------------------------------------------*/
133 
134 /*
135     Creates the global mapping information in the ISLocalToGlobalMapping structure
136 
137     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138 */
139 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
140 {
141   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   if (mapping->data) PetscFunctionReturn(0);
146   end   = 0;
147   start = PETSC_MAX_INT;
148 
149   for (i=0; i<n; i++) {
150     if (idx[i] < 0) continue;
151     if (idx[i] < start) start = idx[i];
152     if (idx[i] > end)   end   = idx[i];
153   }
154   if (start > end) {start = 0; end = -1;}
155   mapping->globalstart = start;
156   mapping->globalend   = end;
157   if (!((PetscObject)mapping)->type_name) {
158     if ((end - start) > PetscMax(4*n,1000000)) {
159       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
160     } else {
161       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
162     }
163   }
164   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
165   PetscFunctionReturn(0);
166 }
167 
168 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
169 {
170   PetscErrorCode              ierr;
171   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
172   ISLocalToGlobalMapping_Basic *map;
173 
174   PetscFunctionBegin;
175   start            = mapping->globalstart;
176   end              = mapping->globalend;
177   ierr             = PetscNew(&map);CHKERRQ(ierr);
178   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
179   map->globals     = globals;
180   for (i=0; i<end-start+1; i++) globals[i] = -1;
181   for (i=0; i<n; i++) {
182     if (idx[i] < 0) continue;
183     globals[idx[i] - start] = i;
184   }
185   mapping->data = (void*)map;
186   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 
190 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191 {
192   PetscErrorCode              ierr;
193   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194   ISLocalToGlobalMapping_Hash *map;
195 
196   PetscFunctionBegin;
197   ierr = PetscNew(&map);CHKERRQ(ierr);
198   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
199   for (i=0; i<n; i++) {
200     if (idx[i] < 0) continue;
201     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
202   }
203   mapping->data = (void*)map;
204   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
205   PetscFunctionReturn(0);
206 }
207 
208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209 {
210   PetscErrorCode              ierr;
211   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
212 
213   PetscFunctionBegin;
214   if (!map) PetscFunctionReturn(0);
215   ierr = PetscFree(map->globals);CHKERRQ(ierr);
216   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221 {
222   PetscErrorCode              ierr;
223   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
224 
225   PetscFunctionBegin;
226   if (!map) PetscFunctionReturn(0);
227   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
228   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
/*
   Instantiate the global-to-local apply kernels by repeated textual
   inclusion of isltog.h.  Each inclusion generates one function variant,
   parameterized by:
     GTOLTYPE - backend data-struct suffix (_Basic dense table or _Hash)
     GTOLNAME - suffix of the generated function's name
     GTOLBS   - block size used by the generated code (mapping->bs or 1)
     GTOL     - macro mapping one global index g to a local index
   NOTE(review): isltog.h presumably #undef's these macros after each use so
   they can be redefined for the next inclusion -- confirm in isltog.h.
*/

/* basic (dense table) backend, indices scaled by the mapping block size */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* basic (dense table) backend, block indices (block size fixed to 1) */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* hash backend, indices scaled by the mapping block size */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* hash backend, block indices (block size fixed to 1) */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
266 
267 /*@
268     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
269 
270     Not Collective
271 
272     Input Parameter:
273 .   ltog - local to global mapping
274 
275     Output Parameter:
276 .   nltog - the duplicated local to global mapping
277 
278     Level: advanced
279 
280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281 @*/
282 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283 {
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
288   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
289   PetscFunctionReturn(0);
290 }
291 
292 /*@
293     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
294 
295     Not Collective
296 
297     Input Parameter:
298 .   ltog - local to global mapping
299 
300     Output Parameter:
301 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
302 
303     Level: advanced
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
306 @*/
307 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
308 {
309   PetscFunctionBegin;
310   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
311   PetscValidIntPointer(n,2);
312   *n = mapping->bs*mapping->n;
313   PetscFunctionReturn(0);
314 }
315 
316 /*@C
317    ISLocalToGlobalMappingViewFromOptions - View from Options
318 
319    Collective on ISLocalToGlobalMapping
320 
321    Input Parameters:
322 +  A - the local to global mapping object
323 .  obj - Optional object
324 -  name - command line option
325 
326    Level: intermediate
327 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
328 @*/
PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
  /* Thin wrapper: defer entirely to the generic options-driven viewer machinery */
  ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
338 
339 /*@C
340     ISLocalToGlobalMappingView - View a local to global mapping
341 
342     Not Collective
343 
344     Input Parameters:
345 +   ltog - local to global mapping
346 -   viewer - viewer
347 
348     Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
351 @*/
352 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
353 {
354   PetscInt       i;
355   PetscMPIInt    rank;
356   PetscBool      iascii;
357   PetscErrorCode ierr;
358 
359   PetscFunctionBegin;
360   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
361   if (!viewer) {
362     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
363   }
364   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
365 
366   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr);
367   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
368   if (iascii) {
369     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
370     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
371     for (i=0; i<mapping->n; i++) {
372       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
373     }
374     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
375     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
376   }
377   PetscFunctionReturn(0);
378 }
379 
380 /*@
381     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
382     ordering and a global parallel ordering.
383 
384     Not collective
385 
386     Input Parameter:
387 .   is - index set containing the global numbers for each local number
388 
389     Output Parameter:
390 .   mapping - new mapping data structure
391 
392     Notes:
393     the block size of the IS determines the block size of the mapping
394     Level: advanced
395 
396 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
397 @*/
398 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
399 {
400   PetscErrorCode ierr;
401   PetscInt       n,bs;
402   const PetscInt *indices;
403   MPI_Comm       comm;
404   PetscBool      isblock;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(is,IS_CLASSID,1);
408   PetscValidPointer(mapping,2);
409 
410   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
411   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
412   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
413   if (!isblock) {
414     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
415     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
416     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
417   } else {
418     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
419     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
420     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
421     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
422   }
423   PetscFunctionReturn(0);
424 }
425 
426 /*@C
427     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
428     ordering and a global parallel ordering.
429 
430     Collective
431 
432     Input Parameters:
433 +   sf - star forest mapping contiguous local indices to (rank, offset)
434 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
435 
436     Output Parameter:
437 .   mapping - new mapping data structure
438 
439     Level: advanced
440 
441     Notes:
442     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
443 
444 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
445 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (start == PETSC_DECIDE) {
    /* Contiguous global numbering: this rank starts after the roots of all
       lower ranks (exclusive prefix sum; start stays 0 on rank 0) */
    start = 0;
    ierr = MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  } else if (start < 0) SETERRQ(comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  if (ilocal) {
    /* Leaves may be scattered: the local space must cover the largest leaf index */
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;   /* NULL ilocal: leaves are contiguous 0..nleaves-1 */
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* Local positions not covered by any leaf remain unmapped (-1) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  /* Pull each root's global number down to every leaf that references it */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  /* The mapping takes ownership of ltog; globals was only a root-side buffer */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
477 
478 /*@
479     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
480 
481     Not collective
482 
483     Input Parameters:
484 +   mapping - mapping data structure
485 -   bs - the blocksize
486 
487     Level: advanced
488 
489 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
490 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;    /* old number of blocks */
  obs = mapping->bs;   /* old block size */
  oid = mapping->indices;
  /* New number of blocks: total individual entries divided by the new bs */
  nn  = (on*obs)/bs;
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* Work on the expanded (per-entry) indices, not the stored block indices */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Each new block must consist of bs consecutive global entries,
       counting negative (unmapped) entries in cn as we go */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* A block may be entirely unmapped (all negative), but not partially */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      /* Global block index: first entry of the block divided by the new bs */
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard any type-specific global-to-local data; it will be rebuilt lazily */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
552 
553 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
556 
557     Not Collective
558 
559     Input Parameters:
560 .   mapping - mapping data structure
561 
562     Output Parameter:
563 .   bs - the blocksize
564 
565     Level: advanced
566 
567 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
568 @*/
569 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
570 {
571   PetscFunctionBegin;
572   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
573   *bs = mapping->bs;
574   PetscFunctionReturn(0);
575 }
576 
577 /*@
578     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
579     ordering and a global parallel ordering.
580 
581     Not Collective, but communicator may have more than one process
582 
583     Input Parameters:
584 +   comm - MPI communicator
585 .   bs - the block size
586 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
587 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
588 -   mode - see PetscCopyMode
589 
590     Output Parameter:
591 .   mapping - new mapping data structure
592 
593     Notes:
594     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
595 
596     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
597     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
598     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
599 
600     Level: advanced
601 
602 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
603           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
604 @*/
605 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
606 {
607   PetscErrorCode ierr;
608   PetscInt       *in;
609 
610   PetscFunctionBegin;
611   if (n) PetscValidIntPointer(indices,4);
612   PetscValidPointer(mapping,6);
613 
614   *mapping = NULL;
615   ierr = ISInitializePackage();CHKERRQ(ierr);
616 
617   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
618   (*mapping)->n  = n;
619   (*mapping)->bs = bs;
620   if (mode == PETSC_COPY_VALUES) {
621     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
622     ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
623     (*mapping)->indices = in;
624     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
625   } else if (mode == PETSC_OWN_POINTER) {
626     (*mapping)->indices = (PetscInt*)indices;
627     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
628   }
629   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
630   PetscFunctionReturn(0);
631 }
632 
633 PetscFunctionList ISLocalToGlobalMappingList = NULL;
634 
635 /*@
636    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
637 
638    Not collective
639 
640    Input Parameters:
641 .  mapping - mapping data structure
642 
643    Level: advanced
644 
645 @*/
646 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
647 {
648   PetscErrorCode             ierr;
649   char                       type[256];
650   ISLocalToGlobalMappingType defaulttype = "Not set";
651   PetscBool                  flg;
652 
653   PetscFunctionBegin;
654   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
655   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
656   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
657   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
658   if (flg) {
659     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
660   }
661   ierr = PetscOptionsEnd();CHKERRQ(ierr);
662   PetscFunctionReturn(0);
663 }
664 
665 /*@
666    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
667    ordering and a global parallel ordering.
668 
   Not Collective
670 
671    Input Parameters:
672 .  mapping - mapping data structure
673 
674    Level: advanced
675 
676 .seealso: ISLocalToGlobalMappingCreate()
677 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Only the final reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* Entry 0 is freed separately from entries 1..info_nproc-1; the same
       pattern appears in ISLocalToGlobalMappingSetBlockSize()'s cache reset.
       NOTE(review): presumably entry 0 owns a distinct allocation -- confirm
       against where info_indices is populated. */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* Let the implementation (basic/hash) release its private data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = NULL;
  PetscFunctionReturn(0);
}
709 
710 /*@
711     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
712     a new index set using the global numbering defined in an ISLocalToGlobalMapping
713     context.
714 
715     Collective on is
716 
717     Input Parameters:
718 +   mapping - mapping between local and global numbering
719 -   is - index set in local numbering
720 
721     Output Parameters:
722 .   newis - index set in global numbering
723 
724     Notes:
725     The output IS will have the same communicator of the input IS.
726 
727     Level: advanced
728 
729 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
730           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
731 @*/
732 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
733 {
734   PetscErrorCode ierr;
735   PetscInt       n,*idxout;
736   const PetscInt *idxin;
737 
738   PetscFunctionBegin;
739   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
740   PetscValidHeaderSpecific(is,IS_CLASSID,2);
741   PetscValidPointer(newis,3);
742 
743   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
744   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
745   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
746   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
747   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
748   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
749   PetscFunctionReturn(0);
750 }
751 
752 /*@
753    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
754    and converts them to the global numbering.
755 
756    Not collective
757 
758    Input Parameters:
759 +  mapping - the local to global mapping context
760 .  N - number of integers
761 -  in - input indices in local numbering
762 
763    Output Parameter:
764 .  out - indices in global numbering
765 
766    Notes:
767    The in and out array parameters may be identical.
768 
769    Level: advanced
770 
771 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
772           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
773           AOPetscToApplication(), ISGlobalToLocalMappingApply()
774 
775 @*/
776 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
777 {
778   PetscInt i,bs,Nmax;
779 
780   PetscFunctionBegin;
781   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
782   bs   = mapping->bs;
783   Nmax = bs*mapping->n;
784   if (bs == 1) {
785     const PetscInt *idx = mapping->indices;
786     for (i=0; i<N; i++) {
787       if (in[i] < 0) {
788         out[i] = in[i];
789         continue;
790       }
791       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
792       out[i] = idx[in[i]];
793     }
794   } else {
795     const PetscInt *idx = mapping->indices;
796     for (i=0; i<N; i++) {
797       if (in[i] < 0) {
798         out[i] = in[i];
799         continue;
800       }
801       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
802       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
803     }
804   }
805   PetscFunctionReturn(0);
806 }
807 
808 /*@
809    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
810 
811    Not collective
812 
813    Input Parameters:
814 +  mapping - the local to global mapping context
815 .  N - number of integers
816 -  in - input indices in local block numbering
817 
818    Output Parameter:
819 .  out - indices in global block numbering
820 
821    Notes:
822    The in and out array parameters may be identical.
823 
824    Example:
825      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
826      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
827 
828    Level: advanced
829 
830 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
831           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
832           AOPetscToApplication(), ISGlobalToLocalMappingApply()
833 
834 @*/
835 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
836 {
837 
838   PetscFunctionBegin;
839   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
840   {
841     PetscInt i,Nmax = mapping->n;
842     const PetscInt *idx = mapping->indices;
843 
844     for (i=0; i<N; i++) {
845       if (in[i] < 0) {
846         out[i] = in[i];
847         continue;
848       }
849       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
850       out[i] = idx[in[i]];
851     }
852   }
853   PetscFunctionReturn(0);
854 }
855 
856 /*@
857     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
858     specified with a global numbering.
859 
860     Not collective
861 
862     Input Parameters:
863 +   mapping - mapping between local and global numbering
864 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
865            IS_GTOLM_DROP - drops the indices with no local value from the output list
866 .   n - number of global indices to map
867 -   idx - global indices to map
868 
869     Output Parameters:
870 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
871 -   idxout - local index of each global index, one must pass in an array long enough
872              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
873              idxout == NULL to determine the required length (returned in nout)
874              and then allocate the required space and call ISGlobalToLocalMappingApply()
875              a second time to set the values.
876 
877     Notes:
878     Either nout or idxout may be NULL. idx and idxout may be identical.
879 
880     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
881     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
882     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
883 
884     Level: advanced
885 
886     Developer Note: The manual page states that idx and idxout may be identical but the calling
887        sequence declares idx as const so it cannot be the same as idxout.
888 
889 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
890           ISLocalToGlobalMappingDestroy()
891 @*/
892 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
893 {
894   PetscErrorCode ierr;
895 
896   PetscFunctionBegin;
897   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
898   if (!mapping->data) {
899     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
900   }
901   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
902   PetscFunctionReturn(0);
903 }
904 
905 /*@
906     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
907     a new index set using the local numbering defined in an ISLocalToGlobalMapping
908     context.
909 
910     Not collective
911 
912     Input Parameters:
913 +   mapping - mapping between local and global numbering
914 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
915            IS_GTOLM_DROP - drops the indices with no local value from the output list
916 -   is - index set in global numbering
917 
918     Output Parameters:
919 .   newis - index set in local numbering
920 
921     Notes:
922     The output IS will be sequential, as it encodes a purely local operation
923 
924     Level: advanced
925 
926 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
927           ISLocalToGlobalMappingDestroy()
928 @*/
929 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
930 {
931   PetscErrorCode ierr;
932   PetscInt       n,nout,*idxout;
933   const PetscInt *idxin;
934 
935   PetscFunctionBegin;
936   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
937   PetscValidHeaderSpecific(is,IS_CLASSID,3);
938   PetscValidPointer(newis,4);
939 
940   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
941   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
942   if (type == IS_GTOLM_MASK) {
943     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
944   } else {
945     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
946     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
947   }
948   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
949   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
950   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
951   PetscFunctionReturn(0);
952 }
953 
954 /*@
955     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
956     specified with a block global numbering.
957 
958     Not collective
959 
960     Input Parameters:
961 +   mapping - mapping between local and global numbering
962 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
963            IS_GTOLM_DROP - drops the indices with no local value from the output list
964 .   n - number of global indices to map
965 -   idx - global indices to map
966 
967     Output Parameters:
968 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
969 -   idxout - local index of each global index, one must pass in an array long enough
970              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
971              idxout == NULL to determine the required length (returned in nout)
972              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
973              a second time to set the values.
974 
975     Notes:
976     Either nout or idxout may be NULL. idx and idxout may be identical.
977 
978     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
979     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
980     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
981 
982     Level: advanced
983 
984     Developer Note: The manual page states that idx and idxout may be identical but the calling
985        sequence declares idx as const so it cannot be the same as idxout.
986 
987 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
988           ISLocalToGlobalMappingDestroy()
989 @*/
990 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
991                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
992 {
993   PetscErrorCode ierr;
994 
995   PetscFunctionBegin;
996   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
997   if (!mapping->data) {
998     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
999   }
1000   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
1001   PetscFunctionReturn(0);
1002 }
1003 
1004 /*@C
1005     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1006      each index shared by more than one processor
1007 
1008     Collective on ISLocalToGlobalMapping
1009 
1010     Input Parameter:
1011 .   mapping - the mapping from local to global indexing
1012 
1013     Output Parameters:
1014 +   nproc - number of processors that are connected to this one
1015 .   proc - neighboring processors
1016 .   numproc - number of indices for each subdomain (processor)
1017 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1018 
1019     Level: advanced
1020 
1021     Fortran Usage:
1022 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1023 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1024           PetscInt indices[nproc][numprocmax],ierr)
1025         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1026         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1027 
1028 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1029           ISLocalToGlobalMappingRestoreInfo()
1030 @*/
1031 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1032 {
1033   PetscErrorCode ierr;
1034 
1035   PetscFunctionBegin;
1036   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1037   if (mapping->info_cached) {
1038     *nproc    = mapping->info_nproc;
1039     *procs    = mapping->info_procs;
1040     *numprocs = mapping->info_numprocs;
1041     *indices  = mapping->info_indices;
1042   } else {
1043     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1044   }
1045   PetscFunctionReturn(0);
1046 }
1047 
/*
   Computes, for every neighboring process, the list of local block indices shared
   with that neighbor, and caches the result on the mapping.

   Rendezvous algorithm over three message rounds (tag1/tag2/tag3):
     1. The global index space [0,Ng) is split into equal slabs of ~scale indices;
        the process owning a slab is the "global owner" of those indices.  Every
        process sends (global index, local index) pairs to the global owners (tag1).
     2. Each global owner counts, for each of its indices, how many processes sent
        it; indices with more than one sender are "nontrivial" (on a subdomain
        boundary).  Owners send back, per original sender, the message lengths
        (tag2) and then the lists of sharing ranks for each nontrivial index (tag3).
     3. Each process regroups the returned data by sharing rank, sorts each
        neighbor's index list by global number, and moves its own rank to slot 0.

   Output arrays are cached in mapping->info_* with info_cached = PETSC_TRUE, so
   ISLocalToGlobalMappingGetBlockInfo() can return them without recomputation.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* uniprocessor case: no neighbors; cache an empty (but allocated) result */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  /* three distinct tags, one per communication round */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + (global maximum index over all processes) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRMPI(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* each process globally owns the contiguous slab [rstart, rstart+ng) of ~scale indices */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = number of our indices globally owned by p; nprocs[2*p+1] = 1 if we send to p at all */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message holds up to nmax (global index, local index) pairs, stored at stride 2*nmax */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[]: the packing loop above advanced them past each destination's segment */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced by the fill loop above; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* roles reverse: replies go to the original senders, so nsends2 = nrecvs and (below) nrecvs2 = nsends */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* prefix-sum the lengths to get receive offsets and the total size nt */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here: nprocs[p] = number of our nodes shared with rank p */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a rank to its compressed position in the output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  /* (*numprocs) is zeroed and used as a running fill position per neighbor, then ends at the counts again */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1455 
1456 /*@C
1457     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1458 
1459     Collective on ISLocalToGlobalMapping
1460 
1461     Input Parameter:
1462 .   mapping - the mapping from local to global indexing
1463 
1464     Output Parameters:
1465 +   nproc - number of processors that are connected to this one
1466 .   proc - neighboring processors
1467 .   numproc - number of indices for each processor
1468 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1469 
1470     Level: advanced
1471 
1472 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1473           ISLocalToGlobalMappingGetInfo()
1474 @*/
1475 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1476 {
1477   PetscErrorCode ierr;
1478 
1479   PetscFunctionBegin;
1480   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1481   if (mapping->info_free) {
1482     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1483     if (*indices) {
1484       PetscInt i;
1485 
1486       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1487       for (i=1; i<*nproc; i++) {
1488         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1489       }
1490       ierr = PetscFree(*indices);CHKERRQ(ierr);
1491     }
1492   }
1493   *nproc    = 0;
1494   *procs    = NULL;
1495   *numprocs = NULL;
1496   *indices  = NULL;
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 /*@C
1501     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1502      each index shared by more than one processor
1503 
1504     Collective on ISLocalToGlobalMapping
1505 
1506     Input Parameter:
1507 .   mapping - the mapping from local to global indexing
1508 
1509     Output Parameters:
1510 +   nproc - number of processors that are connected to this one
1511 .   proc - neighboring processors
1512 .   numproc - number of indices for each subdomain (processor)
1513 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1514 
1515     Level: advanced
1516 
1517     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1518 
1519     Fortran Usage:
1520 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1521 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1522           PetscInt indices[nproc][numprocmax],ierr)
1523         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1524         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1525 
1526 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1527           ISLocalToGlobalMappingRestoreInfo()
1528 @*/
1529 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1530 {
1531   PetscErrorCode ierr;
1532   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1533 
1534   PetscFunctionBegin;
1535   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1536   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1537   if (bs > 1) { /* we need to expand the cached info */
1538     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1539     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1540     for (i=0; i<*nproc; i++) {
1541       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1542       for (j=0; j<bnumprocs[i]; j++) {
1543         for (k=0; k<bs; k++) {
1544           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1545         }
1546       }
1547       (*numprocs)[i] = bnumprocs[i]*bs;
1548     }
1549     mapping->info_free = PETSC_TRUE;
1550   } else {
1551     *numprocs = bnumprocs;
1552     *indices  = bindices;
1553   }
1554   PetscFunctionReturn(0);
1555 }
1556 
1557 /*@C
1558     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1559 
1560     Collective on ISLocalToGlobalMapping
1561 
1562     Input Parameter:
1563 .   mapping - the mapping from local to global indexing
1564 
1565     Output Parameters:
1566 +   nproc - number of processors that are connected to this one
1567 .   proc - neighboring processors
1568 .   numproc - number of indices for each processor
1569 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1570 
1571     Level: advanced
1572 
1573 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1574           ISLocalToGlobalMappingGetInfo()
1575 @*/
1576 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1577 {
1578   PetscErrorCode ierr;
1579 
1580   PetscFunctionBegin;
1581   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1582   PetscFunctionReturn(0);
1583 }
1584 
1585 /*@C
1586     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1587 
1588     Collective on ISLocalToGlobalMapping
1589 
1590     Input Parameter:
1591 .   mapping - the mapping from local to global indexing
1592 
1593     Output Parameters:
1594 +   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1595 .   count - number of neighboring processors per node
1596 -   indices - indices of processes sharing the node (sorted)
1597 
1598     Level: advanced
1599 
1600     Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1601 
1602 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1603           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1604 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) { /* build and cache the per-node neighbor lists on first use */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec: per-node neighbor counts (n+1 entries); info_nodei: per-node pointers into one shared buffer (CSR-like) */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count neighbors per node; every node starts at 1 for this
       process itself (neigh[0]), entry i>=1 adds the nodes shared with neigh[i] */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single buffer of m entries; row i starts where row i-1 ends (prefix sums of the counts) */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reuse info_nodec as fill cursors, seeding each row with this process */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* NOTE(review): reads neigh[0] unguarded — assumes n_neigh > 0 whenever n > 0 (GetInfo always reporting self); confirm */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's process list and drop duplicates; this also shrinks info_nodec[i] to the unique count */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* hand out the cached arrays; callers must not free them */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1649 
1650 /*@C
1651     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1652 
1653     Collective on ISLocalToGlobalMapping
1654 
1655     Input Parameter:
1656 .   mapping - the mapping from local to global indexing
1657 
1658     Output Parameters:
1659 +   nnodes - number of local nodes
1660 .   count - number of neighboring processors per node
1661 -   indices - indices of processes sharing the node (sorted)
1662 
1663     Level: advanced
1664 
1665 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1666           ISLocalToGlobalMappingGetInfo()
1667 @*/
1668 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1669 {
1670   PetscFunctionBegin;
1671   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1672   if (nnodes)  *nnodes  = 0;
1673   if (count)   *count   = NULL;
1674   if (indices) *indices = NULL;
1675   PetscFunctionReturn(0);
1676 }
1677 
1678 /*@C
1679    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1680 
1681    Not Collective
1682 
1683    Input Parameter:
1684 . ltog - local to global mapping
1685 
1686    Output Parameter:
1687 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1688 
1689    Level: advanced
1690 
1691    Notes:
1692     ISLocalToGlobalMappingGetSize() returns the length of this array
1693 
1694 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1695 @*/
1696 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1697 {
1698   PetscFunctionBegin;
1699   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1700   PetscValidPointer(array,2);
1701   if (ltog->bs == 1) {
1702     *array = ltog->indices;
1703   } else {
1704     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1705     const PetscInt *ii;
1706     PetscErrorCode ierr;
1707 
1708     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1709     *array = jj;
1710     k    = 0;
1711     ii   = ltog->indices;
1712     for (i=0; i<n; i++)
1713       for (j=0; j<bs; j++)
1714         jj[k++] = bs*ii[i] + j;
1715   }
1716   PetscFunctionReturn(0);
1717 }
1718 
1719 /*@C
1720    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1721 
1722    Not Collective
1723 
1724    Input Parameters:
1725 + ltog - local to global mapping
1726 - array - array of indices
1727 
1728    Level: advanced
1729 
1730 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1731 @*/
1732 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1733 {
1734   PetscFunctionBegin;
1735   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1736   PetscValidPointer(array,2);
1737   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1738 
1739   if (ltog->bs > 1) {
1740     PetscErrorCode ierr;
1741     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1742   }
1743   PetscFunctionReturn(0);
1744 }
1745 
1746 /*@C
1747    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1748 
1749    Not Collective
1750 
1751    Input Parameter:
1752 . ltog - local to global mapping
1753 
1754    Output Parameter:
1755 . array - array of indices
1756 
1757    Level: advanced
1758 
1759 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1760 @*/
1761 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1762 {
1763   PetscFunctionBegin;
1764   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1765   PetscValidPointer(array,2);
1766   *array = ltog->indices;
1767   PetscFunctionReturn(0);
1768 }
1769 
1770 /*@C
1771    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1772 
1773    Not Collective
1774 
1775    Input Parameters:
1776 + ltog - local to global mapping
1777 - array - array of indices
1778 
1779    Level: advanced
1780 
1781 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1782 @*/
1783 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1784 {
1785   PetscFunctionBegin;
1786   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1787   PetscValidPointer(array,2);
1788   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1789   *array = NULL;
1790   PetscFunctionReturn(0);
1791 }
1792 
1793 /*@C
1794    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1795 
1796    Not Collective
1797 
1798    Input Parameters:
1799 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1800 . n - number of mappings to concatenate
1801 - ltogs - local to global mappings
1802 
1803    Output Parameter:
1804 . ltogcat - new mapping
1805 
1806    Note: this currently always returns a mapping with block size of 1
1807 
1808    Developer Note: If all the input mappings have the same block size we could easily handle that as a special case
1809 
1810    Level: advanced
1811 
1812 .seealso: ISLocalToGlobalMappingCreate()
1813 @*/
1814 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1815 {
1816   PetscInt       i,cnt,m,*idx;
1817   PetscErrorCode ierr;
1818 
1819   PetscFunctionBegin;
1820   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1821   if (n > 0) PetscValidPointer(ltogs,3);
1822   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1823   PetscValidPointer(ltogcat,4);
1824   for (cnt=0,i=0; i<n; i++) {
1825     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1826     cnt += m;
1827   }
1828   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1829   for (cnt=0,i=0; i<n; i++) {
1830     const PetscInt *subidx;
1831     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1832     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1833     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1834     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1835     cnt += m;
1836   }
1837   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1838   PetscFunctionReturn(0);
1839 }
1840 
1841 /*MC
1842       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1843                                     used this is good for only small and moderate size problems.
1844 
1845    Options Database Keys:
1846 .   -islocaltoglobalmapping_type basic - select this method
1847 
1848    Level: beginner
1849 
1850 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1851 M*/
1852 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1853 {
1854   PetscFunctionBegin;
1855   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1856   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1857   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1858   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1859   PetscFunctionReturn(0);
1860 }
1861 
1862 /*MC
1863       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1864                                     used this is good for large memory problems.
1865 
1866    Options Database Keys:
1867 .   -islocaltoglobalmapping_type hash - select this method
1868 
1869    Notes:
1870     This is selected automatically for large problems if the user does not set the type.
1871 
1872    Level: beginner
1873 
1874 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1875 M*/
1876 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1877 {
1878   PetscFunctionBegin;
1879   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1880   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1881   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1882   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1883   PetscFunctionReturn(0);
1884 }
1885 
1886 /*@C
1887     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1888 
1889    Not Collective
1890 
1891    Input Parameters:
1892 +  sname - name of a new method
1893 -  routine_create - routine to create method context
1894 
1895    Notes:
1896    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1897 
1898    Sample usage:
1899 .vb
1900    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1901 .ve
1902 
1903    Then, your mapping can be chosen with the procedural interface via
1904 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1905    or at runtime via the option
1906 $     -islocaltoglobalmapping_type my_mapper
1907 
1908    Level: advanced
1909 
1910 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1911 
1912 @*/
1913 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1914 {
1915   PetscErrorCode ierr;
1916 
1917   PetscFunctionBegin;
1918   ierr = ISInitializePackage();CHKERRQ(ierr);
1919   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1920   PetscFunctionReturn(0);
1921 }
1922 
1923 /*@C
1924    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1925 
1926    Logically Collective on ISLocalToGlobalMapping
1927 
1928    Input Parameters:
1929 +  ltog - the ISLocalToGlobalMapping object
1930 -  type - a known method
1931 
1932    Options Database Key:
1933 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1934     of available methods (for instance, basic or hash)
1935 
1936    Notes:
1937    See "petsc/include/petscis.h" for available methods
1938 
1939   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1940   then set the ISLocalToGlobalMapping type from the options database rather than by using
1941   this routine.
1942 
1943   Level: intermediate
1944 
1945   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1946   are accessed by ISLocalToGlobalMappingSetType().
1947 
1948 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1949 
1950 @*/
1951 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1952 {
1953   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1954   PetscBool      match;
1955 
1956   PetscFunctionBegin;
1957   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1958   PetscValidCharPointer(type,2);
1959 
1960   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1961   if (match) PetscFunctionReturn(0);
1962 
1963   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1964   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1965   /* Destroy the previous private LTOG context */
1966   if (ltog->ops->destroy) {
1967     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1968     ltog->ops->destroy = NULL;
1969   }
1970   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1971   ierr = (*r)(ltog);CHKERRQ(ierr);
1972   PetscFunctionReturn(0);
1973 }
1974 
/* Set once ISLocalToGlobalMappingRegisterAll() has run, so repeated calls are cheap no-ops */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1976 
1977 /*@C
1978   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1979 
1980   Not Collective
1981 
1982   Level: advanced
1983 
1984 .seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1985 @*/
1986 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1987 {
1988   PetscErrorCode ierr;
1989 
1990   PetscFunctionBegin;
1991   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1992   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1993 
1994   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
1995   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
1996   PetscFunctionReturn(0);
1997 }
1998