xref: /petsc/src/vec/is/utils/isltog.c (revision d6685f554fbda8d96c6a5d73ab0e7a4e21a05c51)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47   PetscErrorCode ierr;
48 
49   PetscFunctionBeginHot;
50   *pStart = 0;
51   *points = NULL;
52   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
53   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
54   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
55   *pEnd   = *pStart + numCells;
56   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
57   PetscFunctionReturn(0);
58 }
59 
60 /*@C
61   ISRestorePointRange - Destroys the traversal description
62 
63   Not collective
64 
65   Input Parameters:
66 + pointIS - The IS object
67 . pStart  - The first index, from ISGetPointRange()
68 . pEnd    - One past the last index, from ISGetPointRange()
69 - points  - The indices, from ISGetPointRange()
70 
71   Notes:
72   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
73 $ ISGetPointRange(is, &pStart, &pEnd, &points);
74 $ for (p = pStart; p < pEnd; ++p) {
75 $   const PetscInt point = points ? points[p] : p;
76 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
78 
79   Level: intermediate
80 
81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
82 @*/
83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
84 {
85   PetscInt       step = 1;
86   PetscBool      isStride;
87   PetscErrorCode ierr;
88 
89   PetscFunctionBeginHot;
90   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
91   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
92   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
93   PetscFunctionReturn(0);
94 }
95 
96 /*@C
97   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
98 
99   Not collective
100 
101   Input Parameters:
102 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
104 . pEnd       - One past the last index for the subrange
105 - points     - The indices for the entire range, from ISGetPointRange()
106 
107   Output Parameters:
108 . subpointIS - The IS object now configured to be a subrange
109 
110   Notes:
111   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
112 
113   Level: intermediate
114 
115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116 @*/
117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118 {
119   PetscErrorCode ierr;
120 
121   PetscFunctionBeginHot;
122   if (points) {
123     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
124     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
125   } else {
126     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
127     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 /* -----------------------------------------------------------------------------------------*/
133 
134 /*
135     Creates the global mapping information in the ISLocalToGlobalMapping structure
136 
137     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138 */
139 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
140 {
141   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   if (mapping->data) PetscFunctionReturn(0);
146   end   = 0;
147   start = PETSC_MAX_INT;
148 
149   for (i=0; i<n; i++) {
150     if (idx[i] < 0) continue;
151     if (idx[i] < start) start = idx[i];
152     if (idx[i] > end)   end   = idx[i];
153   }
154   if (start > end) {start = 0; end = -1;}
155   mapping->globalstart = start;
156   mapping->globalend   = end;
157   if (!((PetscObject)mapping)->type_name) {
158     if ((end - start) > PetscMax(4*n,1000000)) {
159       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
160     } else {
161       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
162     }
163   }
164   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
165   PetscFunctionReturn(0);
166 }
167 
168 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
169 {
170   PetscErrorCode              ierr;
171   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
172   ISLocalToGlobalMapping_Basic *map;
173 
174   PetscFunctionBegin;
175   start            = mapping->globalstart;
176   end              = mapping->globalend;
177   ierr             = PetscNew(&map);CHKERRQ(ierr);
178   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
179   map->globals     = globals;
180   for (i=0; i<end-start+1; i++) globals[i] = -1;
181   for (i=0; i<n; i++) {
182     if (idx[i] < 0) continue;
183     globals[idx[i] - start] = i;
184   }
185   mapping->data = (void*)map;
186   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 
190 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191 {
192   PetscErrorCode              ierr;
193   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194   ISLocalToGlobalMapping_Hash *map;
195 
196   PetscFunctionBegin;
197   ierr = PetscNew(&map);CHKERRQ(ierr);
198   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
199   for (i=0; i<n; i++) {
200     if (idx[i] < 0) continue;
201     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
202   }
203   mapping->data = (void*)map;
204   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
205   PetscFunctionReturn(0);
206 }
207 
208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209 {
210   PetscErrorCode              ierr;
211   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
212 
213   PetscFunctionBegin;
214   if (!map) PetscFunctionReturn(0);
215   ierr = PetscFree(map->globals);CHKERRQ(ierr);
216   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221 {
222   PetscErrorCode              ierr;
223   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
224 
225   PetscFunctionBegin;
226   if (!map) PetscFunctionReturn(0);
227   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
228   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
/*
  Template instantiation of the global-to-local apply routines.

  isltog.h is included four times; before each include the following macros
  configure the generated code (the header #undef's them after use):
    GTOLTYPE - suffix of the type-specific data struct (_Basic or _Hash)
    GTOLNAME - suffix of the generated function name
    GTOLBS   - block size the generated routine operates in
               (mapping->bs for point-wise variants, 1 for Block variants)
    GTOL     - statement that maps a global index g to a local index `local`
               (-1 / not-found semantics come from the table contents;
               NOTE(review): the Hash variants assume PetscHMapIGet leaves
               `local` negative on a missing key — confirm against hashmap docs)
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
266 
267 /*@
268     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
269 
270     Not Collective
271 
272     Input Parameter:
273 .   ltog - local to global mapping
274 
275     Output Parameter:
276 .   nltog - the duplicated local to global mapping
277 
278     Level: advanced
279 
280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281 @*/
282 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283 {
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
288   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
289   PetscFunctionReturn(0);
290 }
291 
292 /*@
293     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
294 
295     Not Collective
296 
297     Input Parameter:
298 .   ltog - local to global mapping
299 
300     Output Parameter:
301 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
302 
303     Level: advanced
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
306 @*/
307 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
308 {
309   PetscFunctionBegin;
310   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
311   PetscValidIntPointer(n,2);
312   *n = mapping->bs*mapping->n;
313   PetscFunctionReturn(0);
314 }
315 
316 /*@C
317    ISLocalToGlobalMappingViewFromOptions - View from Options
318 
319    Collective on ISLocalToGlobalMapping
320 
321    Input Parameters:
322 +  A - the local to global mapping object
323 .  obj - Optional object
324 -  name - command line option
325 
326    Level: intermediate
327 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
328 @*/
329 PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
330 {
331   PetscErrorCode ierr;
332 
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
335   ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
336   PetscFunctionReturn(0);
337 }
338 
339 /*@C
340     ISLocalToGlobalMappingView - View a local to global mapping
341 
342     Not Collective
343 
344     Input Parameters:
345 +   ltog - local to global mapping
346 -   viewer - viewer
347 
348     Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
351 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* default to the ASCII stdout viewer on the mapping's communicator */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* only ASCII viewers are supported; other viewer types are silently ignored */
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized mode keeps each rank's "[rank] local global" lines together */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
379 
380 /*@
381     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
382     ordering and a global parallel ordering.
383 
384     Not collective
385 
386     Input Parameter:
387 .   is - index set containing the global numbers for each local number
388 
389     Output Parameter:
390 .   mapping - new mapping data structure
391 
392     Notes:
393     the block size of the IS determines the block size of the mapping
394     Level: advanced
395 
396 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
397 @*/
398 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
399 {
400   PetscErrorCode ierr;
401   PetscInt       n,bs;
402   const PetscInt *indices;
403   MPI_Comm       comm;
404   PetscBool      isblock;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(is,IS_CLASSID,1);
408   PetscValidPointer(mapping,2);
409 
410   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
411   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
412   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
413   if (!isblock) {
414     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
415     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
416     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
417   } else {
418     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
419     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
420     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
421     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
422   }
423   PetscFunctionReturn(0);
424 }
425 
426 /*@C
427     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
428     ordering and a global parallel ordering.
429 
430     Collective
431 
432     Input Parameters:
433 +   sf - star forest mapping contiguous local indices to (rank, offset)
434 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
435 
436     Output Parameter:
437 .   mapping - new mapping data structure
438 
439     Level: advanced
440 
441     Notes:
442     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
443 
444 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
445 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (start == PETSC_DECIDE) {
    start = 0;
    /* exclusive prefix sum of root counts gives this rank's first global index */
    ierr = MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  } else PetscCheckFalse(start < 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  if (ilocal) {
    /* leaves are scattered in local space: need one slot past the largest leaf offset */
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves; /* NULL ilocal means leaves are contiguous 0..nleaves-1 */
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1; /* slots not covered by any leaf stay unmapped */
  /* pull each root's global number down to the leaves that reference it */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  /* the mapping takes ownership of ltog (PETSC_OWN_POINTER) */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
477 
478 /*@
479     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
480 
481     Not collective
482 
483     Input Parameters:
484 +   mapping - mapping data structure
485 -   bs - the blocksize
486 
487     Level: advanced
488 
489 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
490 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheckFalse(bs < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs;   /* number of blocks under the new block size */
  PetscCheckFalse((on*obs)%bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* expand the block indices to point-wise indices so they can be re-blocked */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* cn counts negative (unmapped) entries in block i; a valid block is either
       entirely negative or bs consecutive nonnegative indices */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheckFalse(oid[i*bs+j] != oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;   /* here j == bs-1: account for the last entry of the block */
    if (cn) {
      PetscCheckFalse(cn != bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;               /* whole block unmapped */
    } else {
      nid[i] = oid[i*bs]/bs;     /* first point index of the block, in units of the new bs */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the single allocation backing entry 0; the rest were allocated individually */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* the type-specific global-to-local table is now stale; destroy it so it is rebuilt on next use */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
552 
553 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
556 
557     Not Collective
558 
559     Input Parameters:
560 .   mapping - mapping data structure
561 
562     Output Parameter:
563 .   bs - the blocksize
564 
565     Level: advanced
566 
567 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
568 @*/
569 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
570 {
571   PetscFunctionBegin;
572   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
573   *bs = mapping->bs;
574   PetscFunctionReturn(0);
575 }
576 
577 /*@
578     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
579     ordering and a global parallel ordering.
580 
581     Not Collective, but communicator may have more than one process
582 
583     Input Parameters:
584 +   comm - MPI communicator
585 .   bs - the block size
586 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
587 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
588 -   mode - see PetscCopyMode
589 
590     Output Parameter:
591 .   mapping - new mapping data structure
592 
593     Notes:
594     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
595 
596     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
597     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
598     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
599 
600     Level: advanced
601 
602 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
603           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
604 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,4);   /* indices may be NULL only when n == 0 */
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy; the mapping frees it on destroy */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
    (*mapping)->indices = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* adopt the caller's array and free it on destroy */
    (*mapping)->indices = (PetscInt*)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_USE_POINTER) {
    /* alias the caller's array; the caller keeps ownership */
    (*mapping)->indices = (PetscInt*)indices;
  }
  else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
  PetscFunctionReturn(0);
}
636 
637 PetscFunctionList ISLocalToGlobalMappingList = NULL;
638 
639 /*@
640    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
641 
642    Not collective
643 
644    Input Parameters:
645 .  mapping - mapping data structure
646 
647    Level: advanced
648 
649 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  /* PetscObjectOptionsBegin/PetscOptionsEnd bracket the options queries; they must stay paired */
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* offer the current type (or a placeholder) as the displayed default */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
668 
669 /*@
670    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
671    ordering and a global parallel ordering.
672 
   Not Collective
674 
675    Input Parameters:
676 .  mapping - mapping data structure
677 
678    Level: advanced
679 
680 .seealso: ISLocalToGlobalMappingCreate()
681 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* other references remain: just drop the caller's handle */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* dealloc_indices is set only for PETSC_COPY_VALUES / PETSC_OWN_POINTER creation modes */
  if ((*mapping)->dealloc_indices) {
    ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  }
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 backs its own allocation; entries 1..nproc-1 were allocated individually */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* let the type-specific implementation release its global-to-local table */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = NULL;
  PetscFunctionReturn(0);
}
715 
716 /*@
717     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
718     a new index set using the global numbering defined in an ISLocalToGlobalMapping
719     context.
720 
721     Collective on is
722 
723     Input Parameters:
724 +   mapping - mapping between local and global numbering
725 -   is - index set in local numbering
726 
727     Output Parameters:
728 .   newis - index set in global numbering
729 
730     Notes:
731     The output IS will have the same communicator of the input IS.
732 
733     Level: advanced
734 
735 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
736           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
737 @*/
738 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
739 {
740   PetscErrorCode ierr;
741   PetscInt       n,*idxout;
742   const PetscInt *idxin;
743 
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
746   PetscValidHeaderSpecific(is,IS_CLASSID,2);
747   PetscValidPointer(newis,3);
748 
749   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
750   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
751   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
752   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
753   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
754   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
755   PetscFunctionReturn(0);
756 }
757 
758 /*@
759    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
760    and converts them to the global numbering.
761 
762    Not collective
763 
764    Input Parameters:
765 +  mapping - the local to global mapping context
766 .  N - number of integers
767 -  in - input indices in local numbering
768 
769    Output Parameter:
770 .  out - indices in global numbering
771 
772    Notes:
773    The in and out array parameters may be identical.
774 
775    Level: advanced
776 
777 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
778           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
779           AOPetscToApplication(), ISGlobalToLocalMappingApply()
780 
781 @*/
782 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
783 {
784   PetscInt i,bs,Nmax;
785 
786   PetscFunctionBegin;
787   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
788   bs   = mapping->bs;
789   Nmax = bs*mapping->n;
790   if (bs == 1) {
791     const PetscInt *idx = mapping->indices;
792     for (i=0; i<N; i++) {
793       if (in[i] < 0) {
794         out[i] = in[i];
795         continue;
796       }
797       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
798       out[i] = idx[in[i]];
799     }
800   } else {
801     const PetscInt *idx = mapping->indices;
802     for (i=0; i<N; i++) {
803       if (in[i] < 0) {
804         out[i] = in[i];
805         continue;
806       }
807       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
808       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
809     }
810   }
811   PetscFunctionReturn(0);
812 }
813 
814 /*@
815    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
816 
817    Not collective
818 
819    Input Parameters:
820 +  mapping - the local to global mapping context
821 .  N - number of integers
822 -  in - input indices in local block numbering
823 
824    Output Parameter:
825 .  out - indices in global block numbering
826 
827    Notes:
828    The in and out array parameters may be identical.
829 
830    Example:
831      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
832      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
833 
834    Level: advanced
835 
836 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
837           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
838           AOPetscToApplication(), ISGlobalToLocalMappingApply()
839 
840 @*/
841 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
842 {
843 
844   PetscFunctionBegin;
845   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
846   {
847     PetscInt i,Nmax = mapping->n;
848     const PetscInt *idx = mapping->indices;
849 
850     for (i=0; i<N; i++) {
851       if (in[i] < 0) {
852         out[i] = in[i];
853         continue;
854       }
855       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
856       out[i] = idx[in[i]];
857     }
858   }
859   PetscFunctionReturn(0);
860 }
861 
862 /*@
863     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
864     specified with a global numbering.
865 
866     Not collective
867 
868     Input Parameters:
869 +   mapping - mapping between local and global numbering
870 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
871            IS_GTOLM_DROP - drops the indices with no local value from the output list
872 .   n - number of global indices to map
873 -   idx - global indices to map
874 
875     Output Parameters:
876 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
877 -   idxout - local index of each global index, one must pass in an array long enough
878              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
879              idxout == NULL to determine the required length (returned in nout)
880              and then allocate the required space and call ISGlobalToLocalMappingApply()
881              a second time to set the values.
882 
883     Notes:
884     Either nout or idxout may be NULL. idx and idxout may be identical.
885 
886     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
887     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
888     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
889 
890     Level: advanced
891 
892     Developer Note: The manual page states that idx and idxout may be identical but the calling
893        sequence declares idx as const so it cannot be the same as idxout.
894 
895 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
896           ISLocalToGlobalMappingDestroy()
897 @*/
898 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
899 {
900   PetscErrorCode ierr;
901 
902   PetscFunctionBegin;
903   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
904   if (!mapping->data) {
905     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
906   }
907   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
908   PetscFunctionReturn(0);
909 }
910 
911 /*@
912     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
913     a new index set using the local numbering defined in an ISLocalToGlobalMapping
914     context.
915 
916     Not collective
917 
918     Input Parameters:
919 +   mapping - mapping between local and global numbering
920 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
921            IS_GTOLM_DROP - drops the indices with no local value from the output list
922 -   is - index set in global numbering
923 
924     Output Parameters:
925 .   newis - index set in local numbering
926 
927     Notes:
928     The output IS will be sequential, as it encodes a purely local operation
929 
930     Level: advanced
931 
932 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
933           ISLocalToGlobalMappingDestroy()
934 @*/
935 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
936 {
937   PetscErrorCode ierr;
938   PetscInt       n,nout,*idxout;
939   const PetscInt *idxin;
940 
941   PetscFunctionBegin;
942   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
943   PetscValidHeaderSpecific(is,IS_CLASSID,3);
944   PetscValidPointer(newis,4);
945 
946   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
947   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
948   if (type == IS_GTOLM_MASK) {
949     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
950   } else {
951     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
952     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
953   }
954   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
955   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
956   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
960 /*@
961     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
962     specified with a block global numbering.
963 
964     Not collective
965 
966     Input Parameters:
967 +   mapping - mapping between local and global numbering
968 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
969            IS_GTOLM_DROP - drops the indices with no local value from the output list
970 .   n - number of global indices to map
971 -   idx - global indices to map
972 
973     Output Parameters:
974 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
975 -   idxout - local index of each global index, one must pass in an array long enough
976              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
977              idxout == NULL to determine the required length (returned in nout)
978              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
979              a second time to set the values.
980 
981     Notes:
982     Either nout or idxout may be NULL. idx and idxout may be identical.
983 
984     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
985     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
986     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
987 
988     Level: advanced
989 
990     Developer Note: The manual page states that idx and idxout may be identical but the calling
991        sequence declares idx as const so it cannot be the same as idxout.
992 
993 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
994           ISLocalToGlobalMappingDestroy()
995 @*/
996 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
997                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
998 {
999   PetscErrorCode ierr;
1000 
1001   PetscFunctionBegin;
1002   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1003   if (!mapping->data) {
1004     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
1005   }
1006   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
1007   PetscFunctionReturn(0);
1008 }
1009 
1010 /*@C
1011     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1012      each index shared by more than one processor
1013 
1014     Collective on ISLocalToGlobalMapping
1015 
1016     Input Parameter:
1017 .   mapping - the mapping from local to global indexing
1018 
1019     Output Parameters:
1020 +   nproc - number of processors that are connected to this one
1021 .   proc - neighboring processors
1022 .   numproc - number of indices for each subdomain (processor)
1023 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1024 
1025     Level: advanced
1026 
1027     Fortran Usage:
1028 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1029 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1030           PetscInt indices[nproc][numprocmax],ierr)
1031         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1032         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1033 
1034 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1035           ISLocalToGlobalMappingRestoreInfo()
1036 @*/
1037 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1038 {
1039   PetscErrorCode ierr;
1040 
1041   PetscFunctionBegin;
1042   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1043   if (mapping->info_cached) {
1044     *nproc    = mapping->info_nproc;
1045     *procs    = mapping->info_procs;
1046     *numprocs = mapping->info_numprocs;
1047     *indices  = mapping->info_indices;
1048   } else {
1049     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1050   }
1051   PetscFunctionReturn(0);
1052 }
1053 
/*
   Computes, for this rank, the list of neighboring ranks that share at least one
   (block) index with it, and for each neighbor the local indices shared with it
   (sorted by global number).  Implemented as a three-phase MPI rendezvous:
   indices are hashed to temporary "global owners" (contiguous ranges of size
   `scale`), each owner collects the set of ranks referencing each index, and the
   shared-index lists are shipped back to the referencing ranks.  Results are
   cached on the mapping for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* sequential: no neighbors, but allocate the single-entry arrays callers expect */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = one more than the largest global index referenced anywhere */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRMPI(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* global indices are dealt out in contiguous chunks of length `scale`;
     this rank temporarily "owns" the range [rstart, rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  /* each entry is a (global index, local position) pair, hence the factor of 2 */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild it for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill loop above advanced starts[]; recompute it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* displacements (starts3) and total size (nt) of the incoming data */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs[p] = number of my shared nodes that rank p also owns */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);  /* bprocs[p] = compressed slot of rank p in (*procs) */
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  /* (*numprocs) is zeroed and reused as per-neighbor fill positions here */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1461 
1462 /*@C
1463     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1464 
1465     Collective on ISLocalToGlobalMapping
1466 
1467     Input Parameter:
1468 .   mapping - the mapping from local to global indexing
1469 
1470     Output Parameters:
1471 +   nproc - number of processors that are connected to this one
1472 .   proc - neighboring processors
1473 .   numproc - number of indices for each processor
1474 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1475 
1476     Level: advanced
1477 
1478 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1479           ISLocalToGlobalMappingGetInfo()
1480 @*/
1481 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1482 {
1483   PetscErrorCode ierr;
1484 
1485   PetscFunctionBegin;
1486   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1487   if (mapping->info_free) {
1488     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1489     if (*indices) {
1490       PetscInt i;
1491 
1492       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1493       for (i=1; i<*nproc; i++) {
1494         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1495       }
1496       ierr = PetscFree(*indices);CHKERRQ(ierr);
1497     }
1498   }
1499   *nproc    = 0;
1500   *procs    = NULL;
1501   *numprocs = NULL;
1502   *indices  = NULL;
1503   PetscFunctionReturn(0);
1504 }
1505 
1506 /*@C
1507     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1508      each index shared by more than one processor
1509 
1510     Collective on ISLocalToGlobalMapping
1511 
1512     Input Parameter:
1513 .   mapping - the mapping from local to global indexing
1514 
1515     Output Parameters:
1516 +   nproc - number of processors that are connected to this one
1517 .   proc - neighboring processors
1518 .   numproc - number of indices for each subdomain (processor)
1519 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1520 
1521     Level: advanced
1522 
1523     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1524 
1525     Fortran Usage:
1526 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1527 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1528           PetscInt indices[nproc][numprocmax],ierr)
1529         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1530         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1531 
1532 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1533           ISLocalToGlobalMappingRestoreInfo()
1534 @*/
PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Obtain the block-level neighbor information; the point-level view returned
     to the caller is derived from it below. NOTE(review): the block info appears
     to be cached on the mapping (hence the info_free bookkeeping) -- confirm in
     ISLocalToGlobalMappingGetBlockInfo_Private(). */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      /* each shared block index b expands to the bs point indices bs*b .. bs*b+bs-1 */
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* flag that the expanded arrays (not the cached block info) must be freed
       when the caller returns them */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: block info and point info coincide; hand out the block arrays directly */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}
1562 
1563 /*@C
1564     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1565 
1566     Collective on ISLocalToGlobalMapping
1567 
1568     Input Parameter:
1569 .   mapping - the mapping from local to global indexing
1570 
1571     Output Parameters:
1572 +   nproc - number of processors that are connected to this one
1573 .   proc - neighboring processors
1574 .   numproc - number of indices for each processor
1575 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1576 
1577     Level: advanced
1578 
1579 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1580           ISLocalToGlobalMappingGetInfo()
1581 @*/
1582 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1583 {
1584   PetscErrorCode ierr;
1585 
1586   PetscFunctionBegin;
1587   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1588   PetscFunctionReturn(0);
1589 }
1590 
1591 /*@C
1592     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1593 
1594     Collective on ISLocalToGlobalMapping
1595 
1596     Input Parameter:
1597 .   mapping - the mapping from local to global indexing
1598 
1599     Output Parameters:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1601 .   count - number of neighboring processors per node
1602 -   indices - indices of processes sharing the node (sorted)
1603 
1604     Level: advanced
1605 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1607 
1608 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1609           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1610 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) { /* build and cache the per-node rank lists on first use */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec gets one extra slot (zeroed below); info_nodei[i] will point into
       a single contiguous allocation made once the counts are known */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* pass 1: count ranks touching each node; every node is touched at least by
       this process (the initial 1), m accumulates the total list length */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) { /* i=0 is skipped: neigh[0] is (presumably) this rank */
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one allocation backs all per-node lists; set up the CSR-style offsets */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* pass 2: fill the lists, reusing info_nodec as the per-node fill cursor;
       neigh[0] is placed first in every list */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each rank list and drop duplicates (the same rank may share a node
       more than once); this also fixes up the final counts */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* hand out the cached arrays; all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1655 
1656 /*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Returns the data obtained with ISLocalToGlobalMappingGetNodeInfo() (the arrays remain cached in the mapping)
1658 
1659     Collective on ISLocalToGlobalMapping
1660 
1661     Input Parameter:
1662 .   mapping - the mapping from local to global indexing
1663 
1664     Output Parameters:
1665 +   nnodes - number of local nodes
1666 .   count - number of neighboring processors per node
1667 -   indices - indices of processes sharing the node (sorted)
1668 
1669     Level: advanced
1670 
1671 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1672           ISLocalToGlobalMappingGetInfo()
1673 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* The arrays handed out by ISLocalToGlobalMappingGetNodeInfo() are cached on
     the mapping, so nothing is freed here; only the caller's handles are reset. */
  if (nnodes)  *nnodes  = 0;
  if (count)   *count   = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}
1683 
1684 /*@C
1685    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1686 
1687    Not Collective
1688 
1689    Input Parameter:
1690 . ltog - local to global mapping
1691 
1692    Output Parameter:
1693 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1694 
1695    Level: advanced
1696 
1697    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1699 
1700 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1701 @*/
1702 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1703 {
1704   PetscFunctionBegin;
1705   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1706   PetscValidPointer(array,2);
1707   if (ltog->bs == 1) {
1708     *array = ltog->indices;
1709   } else {
1710     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1711     const PetscInt *ii;
1712     PetscErrorCode ierr;
1713 
1714     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1715     *array = jj;
1716     k    = 0;
1717     ii   = ltog->indices;
1718     for (i=0; i<n; i++)
1719       for (j=0; j<bs; j++)
1720         jj[k++] = bs*ii[i] + j;
1721   }
1722   PetscFunctionReturn(0);
1723 }
1724 
1725 /*@C
1726    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1727 
1728    Not Collective
1729 
1730    Input Parameters:
1731 + ltog - local to global mapping
1732 - array - array of indices
1733 
1734    Level: advanced
1735 
1736 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1737 @*/
1738 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1739 {
1740   PetscFunctionBegin;
1741   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1742   PetscValidPointer(array,2);
1743   PetscCheckFalse(ltog->bs == 1 && *array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1744 
1745   if (ltog->bs > 1) {
1746     PetscErrorCode ierr;
1747     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1748   }
1749   PetscFunctionReturn(0);
1750 }
1751 
1752 /*@C
1753    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1754 
1755    Not Collective
1756 
1757    Input Parameter:
1758 . ltog - local to global mapping
1759 
1760    Output Parameter:
1761 . array - array of indices
1762 
1763    Level: advanced
1764 
1765 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1766 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* read-only view of the mapping's internal block index array; no copy is made,
     so the same pointer must be handed back via ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1775 
1776 /*@C
1777    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1778 
1779    Not Collective
1780 
1781    Input Parameters:
1782 + ltog - local to global mapping
1783 - array - array of indices
1784 
1785    Level: advanced
1786 
1787 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1788 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get routine lent out the internal array; verify the caller did not swap the pointer */
  PetscCheckFalse(*array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL; /* invalidate the caller's handle; nothing is freed */
  PetscFunctionReturn(0);
}
1798 
1799 /*@C
1800    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1801 
1802    Not Collective
1803 
1804    Input Parameters:
1805 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1806 . n - number of mappings to concatenate
1807 - ltogs - local to global mappings
1808 
1809    Output Parameter:
1810 . ltogcat - new mapping
1811 
1812    Note: this currently always returns a mapping with block size of 1
1813 
1814    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1815 
1816    Level: advanced
1817 
1818 .seealso: ISLocalToGlobalMappingCreate()
1819 @*/
1820 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1821 {
1822   PetscInt       i,cnt,m,*idx;
1823   PetscErrorCode ierr;
1824 
1825   PetscFunctionBegin;
1826   PetscCheckFalse(n < 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1827   if (n > 0) PetscValidPointer(ltogs,3);
1828   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1829   PetscValidPointer(ltogcat,4);
1830   for (cnt=0,i=0; i<n; i++) {
1831     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1832     cnt += m;
1833   }
1834   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1835   for (cnt=0,i=0; i<n; i++) {
1836     const PetscInt *subidx;
1837     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1838     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1839     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1840     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1841     cnt += m;
1842   }
1843   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1846 
1847 /*MC
1848       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1849                                     used this is good for only small and moderate size problems.
1850 
1851    Options Database Keys:
1852 .   -islocaltoglobalmapping_type basic - select this method
1853 
1854    Level: beginner
1855 
1856 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1857 M*/
/* Constructor for ISLOCALTOGLOBALMAPPINGBASIC: installs the basic (table-based)
   global-to-local implementations into the mapping's function table. */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}
1867 
1868 /*MC
1869       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1870                                     used this is good for large memory problems.
1871 
1872    Options Database Keys:
1873 .   -islocaltoglobalmapping_type hash - select this method
1874 
1875    Notes:
1876     This is selected automatically for large problems if the user does not set the type.
1877 
1878    Level: beginner
1879 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1881 M*/
/* Constructor for ISLOCALTOGLOBALMAPPINGHASH: installs the hash-table-based
   global-to-local implementations into the mapping's function table. */
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}
1891 
1892 /*@C
1893     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1894 
1895    Not Collective
1896 
1897    Input Parameters:
1898 +  sname - name of a new method
1899 -  routine_create - routine to create method context
1900 
1901    Notes:
1902    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1903 
1904    Sample usage:
1905 .vb
1906    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1907 .ve
1908 
1909    Then, your mapping can be chosen with the procedural interface via
1910 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1911    or at runtime via the option
1912 $     -islocaltoglobalmapping_type my_mapper
1913 
1914    Level: advanced
1915 
1916 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1917 
1918 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the IS package (and hence the registry) is initialized before registering */
  ierr = ISInitializePackage();CHKERRQ(ierr);
  /* add (name, constructor) to the list consulted by ISLocalToGlobalMappingSetType() */
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1928 
1929 /*@C
1930    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1931 
1932    Logically Collective on ISLocalToGlobalMapping
1933 
1934    Input Parameters:
1935 +  ltog - the ISLocalToGlobalMapping object
1936 -  type - a known method
1937 
1938    Options Database Key:
1939 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1940     of available methods (for instance, basic or hash)
1941 
1942    Notes:
1943    See "petsc/include/petscis.h" for available methods
1944 
1945   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1946   then set the ISLocalToGlobalMapping type from the options database rather than by using
1947   this routine.
1948 
1949   Level: intermediate
1950 
1951   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1952   are accessed by ISLocalToGlobalMappingSetType().
1953 
1954 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1955 
1956 @*/
1957 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1958 {
1959   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1960   PetscBool      match;
1961 
1962   PetscFunctionBegin;
1963   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1964   PetscValidCharPointer(type,2);
1965 
1966   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1967   if (match) PetscFunctionReturn(0);
1968 
1969   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1970   PetscCheckFalse(!r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1971   /* Destroy the previous private LTOG context */
1972   if (ltog->ops->destroy) {
1973     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1974     ltog->ops->destroy = NULL;
1975   }
1976   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1977   ierr = (*r)(ltog);CHKERRQ(ierr);
1978   PetscFunctionReturn(0);
1979 }
1980 
1981 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1982 
1983 /*@C
1984   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1985 
1986   Not Collective
1987 
1988   Level: advanced
1989 
.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1991 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* idempotent: register the built-in implementations only once */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2004