xref: /petsc/src/vec/is/utils/isltog.c (revision 4e278199b78715991f5c71ebbd945c1489263e6c)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Data for the BASIC global-to-local mapping: a dense lookup table covering [globalstart, globalend] */
typedef struct {
  PetscInt *globals;   /* globals[g - globalstart] = local index of global index g, or -1 if g is not mapped (see ISGlobalToLocalMappingSetUp_Basic()) */
} ISLocalToGlobalMapping_Basic;
13 
/* Data for the HASH global-to-local mapping: memory scales with n, not with the global range */
typedef struct {
  PetscHMapI globalht; /* hash map from global index to local index (see ISGlobalToLocalMappingSetUp_Hash()) */
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47   PetscErrorCode ierr;
48 
49   PetscFunctionBeginHot;
50   *pStart = 0;
51   *points = NULL;
52   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
53   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
54   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
55   *pEnd   = *pStart + numCells;
56   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
57   PetscFunctionReturn(0);
58 }
59 
60 /*@C
61   ISRestorePointRange - Destroys the traversal description
62 
63   Not collective
64 
65   Input Parameters:
66 + pointIS - The IS object
67 . pStart  - The first index, from ISGetPointRange()
68 . pEnd    - One past the last index, from ISGetPointRange()
69 - points  - The indices, from ISGetPointRange()
70 
71   Notes:
72   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
73 $ ISGetPointRange(is, &pStart, &pEnd, &points);
74 $ for (p = pStart; p < pEnd; ++p) {
75 $   const PetscInt point = points ? points[p] : p;
76 $ }
$ ISRestorePointRange(is, &pStart, &pEnd, &points);
78 
79   Level: intermediate
80 
81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
82 @*/
83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
84 {
85   PetscInt       step = 1;
86   PetscBool      isStride;
87   PetscErrorCode ierr;
88 
89   PetscFunctionBeginHot;
90   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
91   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
92   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
93   PetscFunctionReturn(0);
94 }
95 
96 /*@C
97   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
98 
99   Not collective
100 
101   Input Parameters:
102 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
104 . pEnd       - One past the last index for the subrange
105 - points     - The indices for the entire range, from ISGetPointRange()
106 
107   Output Parameters:
108 . subpointIS - The IS object now configured to be a subrange
109 
110   Notes:
111   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
112 
113   Level: intermediate
114 
115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116 @*/
117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118 {
119   PetscErrorCode ierr;
120 
121   PetscFunctionBeginHot;
122   if (points) {
123     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
124     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
125   } else {
126     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
127     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 /* -----------------------------------------------------------------------------------------*/
133 
134 /*
135     Creates the global mapping information in the ISLocalToGlobalMapping structure
136 
137     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138 */
139 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
140 {
141   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   if (mapping->data) PetscFunctionReturn(0);
146   end   = 0;
147   start = PETSC_MAX_INT;
148 
149   for (i=0; i<n; i++) {
150     if (idx[i] < 0) continue;
151     if (idx[i] < start) start = idx[i];
152     if (idx[i] > end)   end   = idx[i];
153   }
154   if (start > end) {start = 0; end = -1;}
155   mapping->globalstart = start;
156   mapping->globalend   = end;
157   if (!((PetscObject)mapping)->type_name) {
158     if ((end - start) > PetscMax(4*n,1000000)) {
159       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
160     } else {
161       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
162     }
163   }
164   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
165   PetscFunctionReturn(0);
166 }
167 
/* Build the dense global-to-local lookup table over [globalstart, globalend] */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  ierr             = PetscNew(&map);CHKERRQ(ierr);
  /* end-start+2 entries, one more than the end-start+1 initialized below —
     presumably padding for the table; TODO confirm whether the extra slot is required */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  map->globals     = globals;
  /* -1 marks global indices with no local counterpart */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;              /* skip unmapped local entries */
    globals[idx[i] - start] = i;           /* table is offset by globalstart */
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
189 
190 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191 {
192   PetscErrorCode              ierr;
193   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194   ISLocalToGlobalMapping_Hash *map;
195 
196   PetscFunctionBegin;
197   ierr = PetscNew(&map);CHKERRQ(ierr);
198   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
199   for (i=0; i<n; i++) {
200     if (idx[i] < 0) continue;
201     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
202   }
203   mapping->data = (void*)map;
204   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
205   PetscFunctionReturn(0);
206 }
207 
208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209 {
210   PetscErrorCode              ierr;
211   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
212 
213   PetscFunctionBegin;
214   if (!map) PetscFunctionReturn(0);
215   ierr = PetscFree(map->globals);CHKERRQ(ierr);
216   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221 {
222   PetscErrorCode              ierr;
223   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
224 
225   PetscFunctionBegin;
226   if (!map) PetscFunctionReturn(0);
227   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
228   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
/*
   The four includes of isltog.h below each instantiate a global-to-local apply
   routine from the same template, parameterized by:
     GTOLTYPE - suffix of the data struct to cast mapping->data to
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size used by the generated routine
     GTOL     - statement mapping a global index g to a local index `local`
                (-1 / negative when g has no local counterpart)
*/

/* point-wise apply using the dense BASIC table */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* block-wise apply using the dense BASIC table */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* point-wise apply using the HASH table */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* block-wise apply using the HASH table */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
266 
267 /*@
268     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
269 
270     Not Collective
271 
272     Input Parameter:
273 .   ltog - local to global mapping
274 
275     Output Parameter:
276 .   nltog - the duplicated local to global mapping
277 
278     Level: advanced
279 
280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281 @*/
282 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283 {
284   PetscErrorCode ierr;
285 
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
288   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
289   PetscFunctionReturn(0);
290 }
291 
292 /*@
293     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
294 
295     Not Collective
296 
297     Input Parameter:
298 .   ltog - local to global mapping
299 
300     Output Parameter:
301 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
302 
303     Level: advanced
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
306 @*/
307 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
308 {
309   PetscFunctionBegin;
310   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
311   PetscValidIntPointer(n,2);
312   *n = mapping->bs*mapping->n;
313   PetscFunctionReturn(0);
314 }
315 
316 /*@C
317    ISLocalToGlobalMappingViewFromOptions - View from Options
318 
319    Collective on ISLocalToGlobalMapping
320 
321    Input Parameters:
322 +  A - the local to global mapping object
323 .  obj - Optional object
324 -  name - command line option
325 
326    Level: intermediate
327 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
328 @*/
329 PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
330 {
331   PetscErrorCode ierr;
332 
333   PetscFunctionBegin;
334   PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
335   ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
336   PetscFunctionReturn(0);
337 }
338 
339 /*@C
340     ISLocalToGlobalMappingView - View a local to global mapping
341 
342     Not Collective
343 
344     Input Parameters:
345 +   ltog - local to global mapping
346 -   viewer - viewer
347 
348     Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
351 @*/
352 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
353 {
354   PetscInt       i;
355   PetscMPIInt    rank;
356   PetscBool      iascii;
357   PetscErrorCode ierr;
358 
359   PetscFunctionBegin;
360   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
361   if (!viewer) {
362     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
363   }
364   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
365 
366   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr);
367   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
368   if (iascii) {
369     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
370     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
371     for (i=0; i<mapping->n; i++) {
372       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
373     }
374     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
375     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
376   }
377   PetscFunctionReturn(0);
378 }
379 
380 /*@
381     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
382     ordering and a global parallel ordering.
383 
384     Not collective
385 
386     Input Parameter:
387 .   is - index set containing the global numbers for each local number
388 
389     Output Parameter:
390 .   mapping - new mapping data structure
391 
392     Notes:
393     the block size of the IS determines the block size of the mapping
394     Level: advanced
395 
396 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
397 @*/
398 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
399 {
400   PetscErrorCode ierr;
401   PetscInt       n,bs;
402   const PetscInt *indices;
403   MPI_Comm       comm;
404   PetscBool      isblock;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(is,IS_CLASSID,1);
408   PetscValidPointer(mapping,2);
409 
410   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
411   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
412   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
413   if (!isblock) {
414     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
415     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
416     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
417   } else {
418     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
419     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
420     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
421     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
422   }
423   PetscFunctionReturn(0);
424 }
425 
426 /*@C
427     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
428     ordering and a global parallel ordering.
429 
430     Collective
431 
432     Input Parameter:
433 +   sf - star forest mapping contiguous local indices to (rank, offset)
434 -   start - first global index on this process
435 
436     Output Parameter:
437 .   mapping - new mapping data structure
438 
439     Level: advanced
440 
441 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
442 @*/
443 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
444 {
445   PetscErrorCode ierr;
446   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
447   const PetscInt *ilocal;
448   MPI_Comm       comm;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
452   PetscValidPointer(mapping,3);
453 
454   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
455   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
456   if (ilocal) {
457     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
458   }
459   else maxlocal = nleaves;
460   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
461   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
462   for (i=0; i<nroots; i++) globals[i] = start + i;
463   for (i=0; i<maxlocal; i++) ltog[i] = -1;
464   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
465   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
466   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
467   ierr = PetscFree(globals);CHKERRQ(ierr);
468   PetscFunctionReturn(0);
469 }
470 
471 /*@
472     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
473 
474     Not collective
475 
476     Input Parameters:
477 +   mapping - mapping data structure
478 -   bs - the blocksize
479 
480     Level: advanced
481 
482 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
483 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;           /* new block-index array, one entry per new block */
  const PetscInt *oid;           /* old indices, expanded to point-wise via GetIndices below */
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);  /* no-op if unchanged */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  /* total point count on*obs must divide evenly into the new block size */
  nn  = (on*obs)/bs;
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* GetIndices returns the point-wise (unblocked) index array */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* each new block must be either all-negative (unmapped) or strictly consecutive;
       cn counts negative entries within the block */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* a partially-negative block cannot be represented */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;   /* block index = first point index divided by new bs */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* entry 0 is freed first, then the remaining per-process arrays individually */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* drop any type-specific global-to-local data; it will be rebuilt on demand */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
545 
546 /*@
547     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
548     ordering and a global parallel ordering.
549 
550     Not Collective
551 
552     Input Parameters:
553 .   mapping - mapping data structure
554 
555     Output Parameter:
556 .   bs - the blocksize
557 
558     Level: advanced
559 
560 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
561 @*/
562 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
563 {
564   PetscFunctionBegin;
565   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
566   *bs = mapping->bs;
567   PetscFunctionReturn(0);
568 }
569 
570 /*@
571     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
572     ordering and a global parallel ordering.
573 
574     Not Collective, but communicator may have more than one process
575 
576     Input Parameters:
577 +   comm - MPI communicator
578 .   bs - the block size
579 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
580 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
581 -   mode - see PetscCopyMode
582 
583     Output Parameter:
584 .   mapping - new mapping data structure
585 
586     Notes:
587     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
588 
589     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
590     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
591     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
592 
593     Level: advanced
594 
595 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
596           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
597 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no local entries */
  if (n) PetscValidIntPointer(indices,4);
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;   /* number of block indices (not points) */
  (*mapping)->bs            = bs;
  /* cached neighbor-information fields start empty; filled lazily elsewhere */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  (*mapping)->info_nodec    = NULL;
  (*mapping)->info_nodei    = NULL;

  /* type-specific ops are installed by ISLocalToGlobalMappingSetType() later */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; it will be freed on destroy */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
637 
638 PetscFunctionList ISLocalToGlobalMappingList = NULL;
639 
640 /*@
641    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
642 
643    Not collective
644 
645    Input Parameters:
646 .  mapping - mapping data structure
647 
648    Level: advanced
649 
650 @*/
651 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
652 {
653   PetscErrorCode             ierr;
654   char                       type[256];
655   ISLocalToGlobalMappingType defaulttype = "Not set";
656   PetscBool                  flg;
657 
658   PetscFunctionBegin;
659   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
660   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
661   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
662   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
663   if (flg) {
664     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
665   }
666   ierr = PetscOptionsEnd();CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
669 
670 /*@
671    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
672    ordering and a global parallel ordering.
673 
   Not Collective
675 
676    Input Parameters:
677 .  mapping - mapping data structure
678 
679    Level: advanced
680 
681 .seealso: ISLocalToGlobalMappingCreate()
682 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);            /* destroying NULL is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  /* free the cached neighbor information */
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 freed first, then entries 1..info_nproc-1 individually */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* info_nodei[0] holds the data slab; the pointer arrays are freed together below */
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* let the type-specific implementation release its data (BASIC table / HASH map) */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = NULL;
  PetscFunctionReturn(0);
}
714 
715 /*@
716     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
717     a new index set using the global numbering defined in an ISLocalToGlobalMapping
718     context.
719 
720     Collective on is
721 
722     Input Parameters:
723 +   mapping - mapping between local and global numbering
724 -   is - index set in local numbering
725 
726     Output Parameters:
727 .   newis - index set in global numbering
728 
729     Notes:
730     The output IS will have the same communicator of the input IS.
731 
732     Level: advanced
733 
734 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
735           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
736 @*/
737 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
738 {
739   PetscErrorCode ierr;
740   PetscInt       n,*idxout;
741   const PetscInt *idxin;
742 
743   PetscFunctionBegin;
744   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
745   PetscValidHeaderSpecific(is,IS_CLASSID,2);
746   PetscValidPointer(newis,3);
747 
748   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
749   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
750   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
751   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
752   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
753   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
754   PetscFunctionReturn(0);
755 }
756 
757 /*@
758    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
759    and converts them to the global numbering.
760 
761    Not collective
762 
763    Input Parameters:
764 +  mapping - the local to global mapping context
765 .  N - number of integers
766 -  in - input indices in local numbering
767 
768    Output Parameter:
769 .  out - indices in global numbering
770 
771    Notes:
772    The in and out array parameters may be identical.
773 
774    Level: advanced
775 
776 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
777           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
778           AOPetscToApplication(), ISGlobalToLocalMappingApply()
779 
780 @*/
781 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
782 {
783   PetscInt i,bs,Nmax;
784 
785   PetscFunctionBegin;
786   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
787   bs   = mapping->bs;
788   Nmax = bs*mapping->n;
789   if (bs == 1) {
790     const PetscInt *idx = mapping->indices;
791     for (i=0; i<N; i++) {
792       if (in[i] < 0) {
793         out[i] = in[i];
794         continue;
795       }
796       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
797       out[i] = idx[in[i]];
798     }
799   } else {
800     const PetscInt *idx = mapping->indices;
801     for (i=0; i<N; i++) {
802       if (in[i] < 0) {
803         out[i] = in[i];
804         continue;
805       }
806       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
807       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
808     }
809   }
810   PetscFunctionReturn(0);
811 }
812 
813 /*@
814    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
815 
816    Not collective
817 
818    Input Parameters:
819 +  mapping - the local to global mapping context
820 .  N - number of integers
821 -  in - input indices in local block numbering
822 
823    Output Parameter:
824 .  out - indices in global block numbering
825 
826    Notes:
827    The in and out array parameters may be identical.
828 
829    Example:
830      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
831      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
832 
833    Level: advanced
834 
835 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
836           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
837           AOPetscToApplication(), ISGlobalToLocalMappingApply()
838 
839 @*/
840 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
841 {
842 
843   PetscFunctionBegin;
844   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
845   {
846     PetscInt i,Nmax = mapping->n;
847     const PetscInt *idx = mapping->indices;
848 
849     for (i=0; i<N; i++) {
850       if (in[i] < 0) {
851         out[i] = in[i];
852         continue;
853       }
854       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
855       out[i] = idx[in[i]];
856     }
857   }
858   PetscFunctionReturn(0);
859 }
860 
861 /*@
862     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
863     specified with a global numbering.
864 
865     Not collective
866 
867     Input Parameters:
868 +   mapping - mapping between local and global numbering
869 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
870            IS_GTOLM_DROP - drops the indices with no local value from the output list
871 .   n - number of global indices to map
872 -   idx - global indices to map
873 
874     Output Parameters:
875 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
876 -   idxout - local index of each global index, one must pass in an array long enough
877              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
878              idxout == NULL to determine the required length (returned in nout)
879              and then allocate the required space and call ISGlobalToLocalMappingApply()
880              a second time to set the values.
881 
882     Notes:
883     Either nout or idxout may be NULL. idx and idxout may be identical.
884 
885     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
886     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
887     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
888 
889     Level: advanced
890 
891     Developer Note: The manual page states that idx and idxout may be identical but the calling
892        sequence declares idx as const so it cannot be the same as idxout.
893 
894 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
895           ISLocalToGlobalMappingDestroy()
896 @*/
897 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
898 {
899   PetscErrorCode ierr;
900 
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
903   if (!mapping->data) {
904     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
905   }
906   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 
910 /*@
911     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
912     a new index set using the local numbering defined in an ISLocalToGlobalMapping
913     context.
914 
915     Not collective
916 
917     Input Parameters:
918 +   mapping - mapping between local and global numbering
919 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
920            IS_GTOLM_DROP - drops the indices with no local value from the output list
921 -   is - index set in global numbering
922 
923     Output Parameters:
924 .   newis - index set in local numbering
925 
926     Notes:
927     The output IS will be sequential, as it encodes a purely local operation
928 
929     Level: advanced
930 
931 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
932           ISLocalToGlobalMappingDestroy()
933 @*/
934 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
935 {
936   PetscErrorCode ierr;
937   PetscInt       n,nout,*idxout;
938   const PetscInt *idxin;
939 
940   PetscFunctionBegin;
941   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
942   PetscValidHeaderSpecific(is,IS_CLASSID,3);
943   PetscValidPointer(newis,4);
944 
945   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
946   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
947   if (type == IS_GTOLM_MASK) {
948     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
949   } else {
950     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
951     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
952   }
953   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
954   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
955   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
956   PetscFunctionReturn(0);
957 }
958 
959 /*@
960     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
961     specified with a block global numbering.
962 
963     Not collective
964 
965     Input Parameters:
966 +   mapping - mapping between local and global numbering
967 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
968            IS_GTOLM_DROP - drops the indices with no local value from the output list
969 .   n - number of global indices to map
970 -   idx - global indices to map
971 
972     Output Parameters:
973 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
974 -   idxout - local index of each global index, one must pass in an array long enough
975              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
976              idxout == NULL to determine the required length (returned in nout)
977              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
978              a second time to set the values.
979 
980     Notes:
981     Either nout or idxout may be NULL. idx and idxout may be identical.
982 
983     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
984     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
985     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
986 
987     Level: advanced
988 
989     Developer Note: The manual page states that idx and idxout may be identical but the calling
990        sequence declares idx as const so it cannot be the same as idxout.
991 
992 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
993           ISLocalToGlobalMappingDestroy()
994 @*/
995 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
996                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
997 {
998   PetscErrorCode ierr;
999 
1000   PetscFunctionBegin;
1001   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1002   if (!mapping->data) {
1003     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
1004   }
1005   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
1006   PetscFunctionReturn(0);
1007 }
1008 
1009 /*@C
1010     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1011      each index shared by more than one processor
1012 
1013     Collective on ISLocalToGlobalMapping
1014 
1015     Input Parameters:
1016 .   mapping - the mapping from local to global indexing
1017 
1018     Output Parameter:
1019 +   nproc - number of processors that are connected to this one
1020 .   proc - neighboring processors
1021 .   numproc - number of indices for each subdomain (processor)
1022 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1023 
1024     Level: advanced
1025 
1026     Fortran Usage:
1027 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1028 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1029           PetscInt indices[nproc][numprocmax],ierr)
1030         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1031         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1032 
1033 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1034           ISLocalToGlobalMappingRestoreInfo()
1035 @*/
1036 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1037 {
1038   PetscErrorCode ierr;
1039 
1040   PetscFunctionBegin;
1041   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1042   if (mapping->info_cached) {
1043     *nproc    = mapping->info_nproc;
1044     *procs    = mapping->info_procs;
1045     *numprocs = mapping->info_numprocs;
1046     *indices  = mapping->info_indices;
1047   } else {
1048     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1049   }
1050   PetscFunctionReturn(0);
1051 }
1052 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - builds, via a three-round parallel
   rendezvous, the list of neighboring ranks and the local (block) indices shared
   with each of them; the result is cached on the mapping for reuse.

   Outline (size > 1):
     1. Each global index is assigned an "owning" rank by an even block partition
        of [0,Ng), scale indices per rank (Ng = 1 + largest referenced index).
     2. Round 1 (tag1): every rank sends each owner the (global index, local
        position) pairs it references; owners count how many ranks reference each
        of their indices (nownedsenders) and record the senders (ownedsenders).
     3. Rounds 2 (tag2, lengths) and 3 (tag3, payload): owners send back to each
        referencing rank the full list of ranks sharing every nontrivial index
        (one referenced by more than one rank).
     4. The receiving side regroups that data per neighboring rank, sorts each
        neighbor's index list by global number, and swaps this rank's own entry
        into slot 0 of the output arrays.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; note (*numprocs)[0] and (*indices)[0] are still
       allocated (single-entry arrays) even though *nproc is 0 */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + the largest global index referenced on any rank */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRMPI(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* even block partition of the global index range: this rank owns [rstart,rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners;
     each entry is a (global index, local position) pair, hence the factors of 2 */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[] since the packing loop above advanced the offsets */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced by the loop above; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a global rank to its compressed slot in the output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1460 
/*@C
    ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* info_free is set by ISLocalToGlobalMappingGetInfo() when it hands out freshly
     expanded (bs > 1) arrays; those belong to the caller and are freed here.
     Otherwise the arrays are the cached copies owned by the mapping and are left alone. */
  if (mapping->info_free) {
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      /* (*indices)[0] is freed outside the loop so it is released even when *nproc
         is 0 — NOTE(review): presumably to match allocation paths that create a
         single entry with *nproc == 0 (see the size == 1 branch of
         ISLocalToGlobalMappingGetBlockInfo_Private()); confirm intent */
      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* zero the caller's pointers so stale references cannot be used after restore */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}
1504 
1505 /*@C
1506     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1507      each index shared by more than one processor
1508 
1509     Collective on ISLocalToGlobalMapping
1510 
1511     Input Parameters:
1512 .   mapping - the mapping from local to global indexing
1513 
1514     Output Parameter:
1515 +   nproc - number of processors that are connected to this one
1516 .   proc - neighboring processors
1517 .   numproc - number of indices for each subdomain (processor)
1518 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1519 
1520     Level: advanced
1521 
1522     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1523 
1524     Fortran Usage:
1525 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1526 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1527           PetscInt indices[nproc][numprocmax],ierr)
1528         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1529         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1530 
1531 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1532           ISLocalToGlobalMappingRestoreInfo()
1533 @*/
1534 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1535 {
1536   PetscErrorCode ierr;
1537   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1538 
1539   PetscFunctionBegin;
1540   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1541   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1542   if (bs > 1) { /* we need to expand the cached info */
1543     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1544     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1545     for (i=0; i<*nproc; i++) {
1546       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1547       for (j=0; j<bnumprocs[i]; j++) {
1548         for (k=0; k<bs; k++) {
1549           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1550         }
1551       }
1552       (*numprocs)[i] = bnumprocs[i]*bs;
1553     }
1554     mapping->info_free = PETSC_TRUE;
1555   } else {
1556     *numprocs = bnumprocs;
1557     *indices  = bindices;
1558   }
1559   PetscFunctionReturn(0);
1560 }
1561 
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt *indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Delegation suffices: ISLocalToGlobalMappingGetInfo() either returned the cached
     block arrays (bs == 1) or set mapping->info_free for its expanded arrays, and
     ISLocalToGlobalMappingRestoreBlockInfo() frees exactly the info_free case. */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1589 
1590 /*@C
1591     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1592 
1593     Collective on ISLocalToGlobalMapping
1594 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)
1602 
1603     Level: advanced
1604 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1606 
1607 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1608           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1609 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) { /* lazily build the per-node neighbor lists and cache them on the mapping */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec gets n+1 entries; the extra slot is explicitly zeroed below */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count the neighbors of each node; every node starts with one (self) entry */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single contiguous allocation of total length m; info_nodei[i] points into it at the running offset */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset the counts and fill the lists */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    /* NOTE(review): assumes neigh[0] is this process (self) and that n_neigh >= 1 whenever
       n > 0 -- confirm against ISLocalToGlobalMappingGetInfo() */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's process list and remove duplicates, updating the counts in place */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* return the cached arrays; ownership stays with the mapping */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1654 
1655 /*@C
1656     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1657 
1658     Collective on ISLocalToGlobalMapping
1659 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)
1667 
1668     Level: advanced
1669 
1670 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1671           ISLocalToGlobalMappingGetInfo()
1672 @*/
1673 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1674 {
1675   PetscFunctionBegin;
1676   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1677   if (nnodes)  *nnodes  = 0;
1678   if (count)   *count   = NULL;
1679   if (indices) *indices = NULL;
1680   PetscFunctionReturn(0);
1681 }
1682 
1683 /*@C
1684    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1685 
1686    Not Collective
1687 
1688    Input Arguments:
1689 . ltog - local to global mapping
1690 
1691    Output Arguments:
1692 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1693 
1694    Level: advanced
1695 
1696    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1698 
1699 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1700 @*/
1701 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1702 {
1703   PetscFunctionBegin;
1704   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1705   PetscValidPointer(array,2);
1706   if (ltog->bs == 1) {
1707     *array = ltog->indices;
1708   } else {
1709     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1710     const PetscInt *ii;
1711     PetscErrorCode ierr;
1712 
1713     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1714     *array = jj;
1715     k    = 0;
1716     ii   = ltog->indices;
1717     for (i=0; i<n; i++)
1718       for (j=0; j<bs; j++)
1719         jj[k++] = bs*ii[i] + j;
1720   }
1721   PetscFunctionReturn(0);
1722 }
1723 
1724 /*@C
1725    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1726 
1727    Not Collective
1728 
1729    Input Arguments:
1730 + ltog - local to global mapping
1731 - array - array of indices
1732 
1733    Level: advanced
1734 
1735 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1736 @*/
1737 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1738 {
1739   PetscFunctionBegin;
1740   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1741   PetscValidPointer(array,2);
1742   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1743 
1744   if (ltog->bs > 1) {
1745     PetscErrorCode ierr;
1746     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1747   }
1748   PetscFunctionReturn(0);
1749 }
1750 
1751 /*@C
1752    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1753 
1754    Not Collective
1755 
1756    Input Arguments:
1757 . ltog - local to global mapping
1758 
1759    Output Arguments:
1760 . array - array of indices
1761 
1762    Level: advanced
1763 
1764 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1765 @*/
1766 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1767 {
1768   PetscFunctionBegin;
1769   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1770   PetscValidPointer(array,2);
1771   *array = ltog->indices;
1772   PetscFunctionReturn(0);
1773 }
1774 
1775 /*@C
1776    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1777 
1778    Not Collective
1779 
1780    Input Arguments:
1781 + ltog - local to global mapping
1782 - array - array of indices
1783 
1784    Level: advanced
1785 
1786 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1787 @*/
1788 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1789 {
1790   PetscFunctionBegin;
1791   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1792   PetscValidPointer(array,2);
1793   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1794   *array = NULL;
1795   PetscFunctionReturn(0);
1796 }
1797 
1798 /*@C
1799    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1800 
1801    Not Collective
1802 
1803    Input Arguments:
1804 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1805 . n - number of mappings to concatenate
1806 - ltogs - local to global mappings
1807 
1808    Output Arguments:
1809 . ltogcat - new mapping
1810 
1811    Note: this currently always returns a mapping with block size of 1
1812 
1813    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1814 
1815    Level: advanced
1816 
1817 .seealso: ISLocalToGlobalMappingCreate()
1818 @*/
1819 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1820 {
1821   PetscInt       i,cnt,m,*idx;
1822   PetscErrorCode ierr;
1823 
1824   PetscFunctionBegin;
1825   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1826   if (n > 0) PetscValidPointer(ltogs,3);
1827   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1828   PetscValidPointer(ltogcat,4);
1829   for (cnt=0,i=0; i<n; i++) {
1830     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1831     cnt += m;
1832   }
1833   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1834   for (cnt=0,i=0; i<n; i++) {
1835     const PetscInt *subidx;
1836     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1837     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1838     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1839     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1840     cnt += m;
1841   }
1842   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1843   PetscFunctionReturn(0);
1844 }
1845 
1846 /*MC
1847       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1848                                     used this is good for only small and moderate size problems.
1849 
1850    Options Database Keys:
1851 .   -islocaltoglobalmapping_type basic - select this method
1852 
1853    Level: beginner
1854 
1855 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1856 M*/
1857 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1858 {
1859   PetscFunctionBegin;
1860   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1861   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1862   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1863   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1864   PetscFunctionReturn(0);
1865 }
1866 
1867 /*MC
1868       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1869                                     used this is good for large memory problems.
1870 
1871    Options Database Keys:
1872 .   -islocaltoglobalmapping_type hash - select this method
1873 
1874    Notes:
1875     This is selected automatically for large problems if the user does not set the type.
1876 
1877    Level: beginner
1878 
1879 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1880 M*/
1881 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1882 {
1883   PetscFunctionBegin;
1884   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1885   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1886   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1887   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1888   PetscFunctionReturn(0);
1889 }
1890 
1891 /*@C
1892     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1893 
1894    Not Collective
1895 
1896    Input Parameters:
1897 +  sname - name of a new method
1898 -  routine_create - routine to create method context
1899 
1900    Notes:
1901    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1902 
1903    Sample usage:
1904 .vb
1905    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1906 .ve
1907 
1908    Then, your mapping can be chosen with the procedural interface via
1909 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1910    or at runtime via the option
1911 $     -islocaltoglobalmapping_type my_mapper
1912 
1913    Level: advanced
1914 
1915 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1916 
1917 @*/
1918 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1919 {
1920   PetscErrorCode ierr;
1921 
1922   PetscFunctionBegin;
1923   ierr = ISInitializePackage();CHKERRQ(ierr);
1924   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1925   PetscFunctionReturn(0);
1926 }
1927 
1928 /*@C
1929    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1930 
1931    Logically Collective on ISLocalToGlobalMapping
1932 
1933    Input Parameters:
1934 +  ltog - the ISLocalToGlobalMapping object
1935 -  type - a known method
1936 
1937    Options Database Key:
1938 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1939     of available methods (for instance, basic or hash)
1940 
1941    Notes:
1942    See "petsc/include/petscis.h" for available methods
1943 
1944   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1945   then set the ISLocalToGlobalMapping type from the options database rather than by using
1946   this routine.
1947 
1948   Level: intermediate
1949 
1950   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1951   are accessed by ISLocalToGlobalMappingSetType().
1952 
1953 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1954 
1955 @*/
1956 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1957 {
1958   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1959   PetscBool      match;
1960 
1961   PetscFunctionBegin;
1962   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1963   PetscValidCharPointer(type,2);
1964 
1965   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1966   if (match) PetscFunctionReturn(0);
1967 
1968   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1969   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1970   /* Destroy the previous private LTOG context */
1971   if (ltog->ops->destroy) {
1972     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1973     ltog->ops->destroy = NULL;
1974   }
1975   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1976   ierr = (*r)(ltog);CHKERRQ(ierr);
1977   PetscFunctionReturn(0);
1978 }
1979 
/* guards ISLocalToGlobalMappingRegisterAll() so the built-in types are registered only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1981 
1982 /*@C
1983   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1984 
1985   Not Collective
1986 
1987   Level: advanced
1988 
1989 .seealso:  ISRegister(),  ISLocalToGlobalRegister()
1990 @*/
1991 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1992 {
1993   PetscErrorCode ierr;
1994 
1995   PetscFunctionBegin;
1996   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1997   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1998 
1999   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
2000   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
2001   PetscFunctionReturn(0);
2002 }
2003