xref: /petsc/src/vec/is/utils/isltog.c (revision a0d791250412d15268757af5c1aebedc81d76f5b)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
37 $ ISRestorePointRange(is, &pstart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47   PetscErrorCode ierr;
48 
49   PetscFunctionBeginHot;
50   *pStart = 0;
51   *points = NULL;
52   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
53   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
54   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
55   *pEnd   = *pStart + numCells;
56   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
57   PetscFunctionReturn(0);
58 }
59 
60 /*@C
61   ISRestorePointRange - Destroys the traversal description
62 
63   Not collective
64 
65   Input Parameters:
66 + pointIS - The IS object
67 . pStart  - The first index, from ISGetPointRange()
68 . pEnd    - One past the last index, from ISGetPointRange()
69 - points  - The indices, from ISGetPointRange()
70 
71   Notes:
72   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
73 $ ISGetPointRange(is, &pStart, &pEnd, &points);
74 $ for (p = pStart; p < pEnd; ++p) {
75 $   const PetscInt point = points ? points[p] : p;
76 $ }
77 $ ISRestorePointRange(is, &pstart, &pEnd, &points);
78 
79   Level: intermediate
80 
81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
82 @*/
83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
84 {
85   PetscInt       step = 1;
86   PetscBool      isStride;
87   PetscErrorCode ierr;
88 
89   PetscFunctionBeginHot;
90   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
91   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
92   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
93   PetscFunctionReturn(0);
94 }
95 
96 /*@C
97   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
98 
99   Not collective
100 
101   Input Parameters:
102 + subpointIS - The IS object to be configured
. pStart     - The first index of the subrange
104 . pEnd       - One past the last index for the subrange
105 - points     - The indices for the entire range, from ISGetPointRange()
106 
107   Output Parameters:
108 . subpointIS - The IS object now configured to be a subrange
109 
110   Notes:
111   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
112 
113   Level: intermediate
114 
115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116 @*/
117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118 {
119   PetscErrorCode ierr;
120 
121   PetscFunctionBeginHot;
122   if (points) {
123     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
124     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
125   } else {
126     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
127     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 /* -----------------------------------------------------------------------------------------*/
133 
134 /*
135     Creates the global mapping information in the ISLocalToGlobalMapping structure
136 
137     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138 */
139 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
140 {
141   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   if (mapping->data) PetscFunctionReturn(0);
146   end   = 0;
147   start = PETSC_MAX_INT;
148 
149   for (i=0; i<n; i++) {
150     if (idx[i] < 0) continue;
151     if (idx[i] < start) start = idx[i];
152     if (idx[i] > end)   end   = idx[i];
153   }
154   if (start > end) {start = 0; end = -1;}
155   mapping->globalstart = start;
156   mapping->globalend   = end;
157   if (!((PetscObject)mapping)->type_name) {
158     if ((end - start) > PetscMax(4*n,1000000)) {
159       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
160     } else {
161       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
162     }
163   }
164   if (mapping->ops->globaltolocalmappingsetup) { ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr); }
165   PetscFunctionReturn(0);
166 }
167 
168 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
169 {
170   PetscErrorCode              ierr;
171   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
172   ISLocalToGlobalMapping_Basic *map;
173 
174   PetscFunctionBegin;
175   start            = mapping->globalstart;
176   end              = mapping->globalend;
177   ierr             = PetscNew(&map);CHKERRQ(ierr);
178   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
179   map->globals     = globals;
180   for (i=0; i<end-start+1; i++) globals[i] = -1;
181   for (i=0; i<n; i++) {
182     if (idx[i] < 0) continue;
183     globals[idx[i] - start] = i;
184   }
185   mapping->data = (void*)map;
186   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
187   PetscFunctionReturn(0);
188 }
189 
190 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
191 {
192   PetscErrorCode              ierr;
193   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
194   ISLocalToGlobalMapping_Hash *map;
195 
196   PetscFunctionBegin;
197   ierr = PetscNew(&map);CHKERRQ(ierr);
198   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
199   for (i=0; i<n; i++) {
200     if (idx[i] < 0) continue;
201     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
202   }
203   mapping->data = (void*)map;
204   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
205   PetscFunctionReturn(0);
206 }
207 
208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209 {
210   PetscErrorCode              ierr;
211   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
212 
213   PetscFunctionBegin;
214   if (!map) PetscFunctionReturn(0);
215   ierr = PetscFree(map->globals);CHKERRQ(ierr);
216   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221 {
222   PetscErrorCode              ierr;
223   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
224 
225   PetscFunctionBegin;
226   if (!map) PetscFunctionReturn(0);
227   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
228   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
/*
   Template instantiation: isltog.h is included four times below, each time
   parameterized by these macros (presumably #undef'd at the end of isltog.h,
   since each group redefines them without error -- TODO confirm):

     GTOLTYPE - suffix of the mapping context struct ("map" in GTOL) cast
                from mapping->data: ISLocalToGlobalMapping_Basic or _Hash
     GTOLNAME - suffix used in the names of the generated functions
     GTOLBS   - block size the generated code uses (mapping->bs for the
                point-wise variants, 1 for the Block variants)
     GTOL     - statement translating a global index g into "local"
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
/* Dense-table lookup: entry at offset g/bs - start holds the local block (or -1),
   then the within-block offset g % bs is reattached */
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
/* Block variant of the dense-table lookup: g is already a block index */
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
/* Hash lookup of the block index g/bs; return value ignored -- "local" is
   presumably left at the hash map's default for missing keys (TODO confirm) */
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
/* Block variant of the hash lookup: g is already a block index */
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
266 
267 /*@
268     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
269 
270     Not Collective
271 
272     Input Parameter:
273 .   ltog - local to global mapping
274 
275     Output Parameter:
276 .   nltog - the duplicated local to global mapping
277 
278     Level: advanced
279 
280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281 @*/
282 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283 {
284   PetscErrorCode             ierr;
285   ISLocalToGlobalMappingType l2gtype;
286 
287   PetscFunctionBegin;
288   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
289   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
290   ierr = ISLocalToGlobalMappingGetType(ltog,&l2gtype);CHKERRQ(ierr);
291   ierr = ISLocalToGlobalMappingSetType(*nltog,l2gtype);CHKERRQ(ierr);
292   PetscFunctionReturn(0);
293 }
294 
295 /*@
296     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
297 
298     Not Collective
299 
300     Input Parameter:
301 .   ltog - local to global mapping
302 
303     Output Parameter:
304 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
305 
306     Level: advanced
307 
308 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
309 @*/
310 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
311 {
312   PetscFunctionBegin;
313   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
314   PetscValidIntPointer(n,2);
315   *n = mapping->bs*mapping->n;
316   PetscFunctionReturn(0);
317 }
318 
319 /*@C
320    ISLocalToGlobalMappingViewFromOptions - View from Options
321 
322    Collective on ISLocalToGlobalMapping
323 
324    Input Parameters:
325 +  A - the local to global mapping object
326 .  obj - Optional object
327 -  name - command line option
328 
329    Level: intermediate
330 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
331 @*/
332 PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
333 {
334   PetscErrorCode ierr;
335 
336   PetscFunctionBegin;
337   PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
338   ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
339   PetscFunctionReturn(0);
340 }
341 
342 /*@C
343     ISLocalToGlobalMappingView - View a local to global mapping
344 
345     Not Collective
346 
347     Input Parameters:
348 +   ltog - local to global mapping
349 -   viewer - viewer
350 
351     Level: advanced
352 
353 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
354 @*/
355 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
356 {
357   PetscInt       i;
358   PetscMPIInt    rank;
359   PetscBool      iascii;
360   PetscErrorCode ierr;
361 
362   PetscFunctionBegin;
363   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
364   if (!viewer) {
365     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
366   }
367   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
368 
369   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr);
370   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
371   if (iascii) {
372     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
373     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
374     for (i=0; i<mapping->n; i++) {
375       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
376     }
377     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
378     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
379   }
380   PetscFunctionReturn(0);
381 }
382 
383 /*@
384     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
385     ordering and a global parallel ordering.
386 
387     Not collective
388 
389     Input Parameter:
390 .   is - index set containing the global numbers for each local number
391 
392     Output Parameter:
393 .   mapping - new mapping data structure
394 
395     Notes:
396     the block size of the IS determines the block size of the mapping
397     Level: advanced
398 
399 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
400 @*/
401 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
402 {
403   PetscErrorCode ierr;
404   PetscInt       n,bs;
405   const PetscInt *indices;
406   MPI_Comm       comm;
407   PetscBool      isblock;
408 
409   PetscFunctionBegin;
410   PetscValidHeaderSpecific(is,IS_CLASSID,1);
411   PetscValidPointer(mapping,2);
412 
413   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
414   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
415   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
416   if (!isblock) {
417     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
418     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
419     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
420   } else {
421     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
422     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
423     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
424     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
425   }
426   PetscFunctionReturn(0);
427 }
428 
429 /*@C
430     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
431     ordering and a global parallel ordering.
432 
433     Collective
434 
435     Input Parameters:
436 +   sf - star forest mapping contiguous local indices to (rank, offset)
437 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
438 
439     Output Parameter:
440 .   mapping - new mapping data structure
441 
442     Level: advanced
443 
444     Notes:
445     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
446 
447 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
448 @*/
449 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
450 {
451   PetscErrorCode ierr;
452   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
453   const PetscInt *ilocal;
454   MPI_Comm       comm;
455 
456   PetscFunctionBegin;
457   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
458   PetscValidPointer(mapping,3);
459 
460   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
461   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
462   if (start == PETSC_DECIDE) {
463     start = 0;
464     ierr = MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
465   } else PetscCheckFalse(start < 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
466   if (ilocal) {
467     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
468   }
469   else maxlocal = nleaves;
470   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
471   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
472   for (i=0; i<nroots; i++) globals[i] = start + i;
473   for (i=0; i<maxlocal; i++) ltog[i] = -1;
474   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
475   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
476   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
477   ierr = PetscFree(globals);CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
481 /*@
482     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
483 
484     Not collective
485 
486     Input Parameters:
487 +   mapping - mapping data structure
488 -   bs - the blocksize
489 
490     Level: advanced
491 
492 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
493 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheckFalse(bs < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;   /* NOTE(review): overwritten by GetIndices below; this value is never read */
  nn  = (on*obs)/bs;        /* number of blocks under the new block size */
  PetscCheckFalse((on*obs)%bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* oid now points at the expanded (unblocked) index array */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Verify entries of block i are consecutive; cn counts negative entries */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheckFalse(oid[i*bs+j] != oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: the last entry was compared against but not counted above */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* A block must be entirely negative (unmapped) or entirely nonnegative */
      PetscCheckFalse(cn != bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;   /* global block index of the block's first entry */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  /* Install the re-blocked index array and reset the cached global range */
  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* entry 0 is freed separately before the loop over the remaining entries
       (same pattern as ISLocalToGlobalMappingDestroy) */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* Drop the type-specific global-to-local data; it is rebuilt on demand */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
555 
556 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
559 
560     Not Collective
561 
562     Input Parameters:
563 .   mapping - mapping data structure
564 
565     Output Parameter:
566 .   bs - the blocksize
567 
568     Level: advanced
569 
570 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
571 @*/
572 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
573 {
574   PetscFunctionBegin;
575   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
576   *bs = mapping->bs;
577   PetscFunctionReturn(0);
578 }
579 
580 /*@
581     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
582     ordering and a global parallel ordering.
583 
584     Not Collective, but communicator may have more than one process
585 
586     Input Parameters:
587 +   comm - MPI communicator
588 .   bs - the block size
589 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
590 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
591 -   mode - see PetscCopyMode
592 
593     Output Parameter:
594 .   mapping - new mapping data structure
595 
596     Notes:
597     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
598 
599     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
600     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
601     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
602 
603     Level: advanced
604 
605 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
606           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
607 @*/
608 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
609 {
610   PetscErrorCode ierr;
611   PetscInt       *in;
612 
613   PetscFunctionBegin;
614   if (n) PetscValidIntPointer(indices,4);
615   PetscValidPointer(mapping,6);
616 
617   *mapping = NULL;
618   ierr = ISInitializePackage();CHKERRQ(ierr);
619 
620   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
621   (*mapping)->n  = n;
622   (*mapping)->bs = bs;
623   if (mode == PETSC_COPY_VALUES) {
624     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
625     ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
626     (*mapping)->indices = in;
627     (*mapping)->dealloc_indices = PETSC_TRUE;
628     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
629   } else if (mode == PETSC_OWN_POINTER) {
630     (*mapping)->indices = (PetscInt*)indices;
631     (*mapping)->dealloc_indices = PETSC_TRUE;
632     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
633   } else if (mode == PETSC_USE_POINTER) {
634     (*mapping)->indices = (PetscInt*)indices;
635   }
636   else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
637   PetscFunctionReturn(0);
638 }
639 
640 PetscFunctionList ISLocalToGlobalMappingList = NULL;
641 
642 /*@
643    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
644 
645    Not collective
646 
647    Input Parameters:
648 .  mapping - mapping data structure
649 
650    Level: advanced
651 
652 @*/
653 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
654 {
655   PetscErrorCode             ierr;
656   char                       type[256];
657   ISLocalToGlobalMappingType defaulttype = "Not set";
658   PetscBool                  flg;
659 
660   PetscFunctionBegin;
661   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
662   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
663   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
664   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
665   if (flg) {
666     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
667   }
668   ierr = PetscOptionsEnd();CHKERRQ(ierr);
669   PetscFunctionReturn(0);
670 }
671 
672 /*@
673    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
674    ordering and a global parallel ordering.
675 
   Not Collective
677 
678    Input Parameters:
679 .  mapping - mapping data structure
680 
681    Level: advanced
682 
683 .seealso: ISLocalToGlobalMappingCreate()
684 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Other references remain: just drop ours and null the caller's handle */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* Free the index array only when this object owns it (PETSC_COPY_VALUES or PETSC_OWN_POINTER) */
  if ((*mapping)->dealloc_indices) {
    ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  }
  /* Release the cached process/neighbor information */
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed separately before the loop over entries 1..nproc-1
       (same pattern as ISLocalToGlobalMappingSetBlockSize) */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* info_nodei[0] backs the node index storage; info_nodec/info_nodei were
     allocated together with PetscMalloc2 (freed with PetscFree2 below) */
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* Let the type-specific implementation (basic/hash) free its data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = NULL;
  PetscFunctionReturn(0);
}
718 
719 /*@
720     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
721     a new index set using the global numbering defined in an ISLocalToGlobalMapping
722     context.
723 
724     Collective on is
725 
726     Input Parameters:
727 +   mapping - mapping between local and global numbering
728 -   is - index set in local numbering
729 
730     Output Parameters:
731 .   newis - index set in global numbering
732 
733     Notes:
734     The output IS will have the same communicator of the input IS.
735 
736     Level: advanced
737 
738 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
739           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
740 @*/
741 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
742 {
743   PetscErrorCode ierr;
744   PetscInt       n,*idxout;
745   const PetscInt *idxin;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
749   PetscValidHeaderSpecific(is,IS_CLASSID,2);
750   PetscValidPointer(newis,3);
751 
752   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
753   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
754   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
755   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
756   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
757   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
758   PetscFunctionReturn(0);
759 }
760 
761 /*@
762    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
763    and converts them to the global numbering.
764 
765    Not collective
766 
767    Input Parameters:
768 +  mapping - the local to global mapping context
769 .  N - number of integers
770 -  in - input indices in local numbering
771 
772    Output Parameter:
773 .  out - indices in global numbering
774 
775    Notes:
776    The in and out array parameters may be identical.
777 
778    Level: advanced
779 
780 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
781           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
782           AOPetscToApplication(), ISGlobalToLocalMappingApply()
783 
784 @*/
785 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
786 {
787   PetscInt i,bs,Nmax;
788 
789   PetscFunctionBegin;
790   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
791   bs   = mapping->bs;
792   Nmax = bs*mapping->n;
793   if (bs == 1) {
794     const PetscInt *idx = mapping->indices;
795     for (i=0; i<N; i++) {
796       if (in[i] < 0) {
797         out[i] = in[i];
798         continue;
799       }
800       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
801       out[i] = idx[in[i]];
802     }
803   } else {
804     const PetscInt *idx = mapping->indices;
805     for (i=0; i<N; i++) {
806       if (in[i] < 0) {
807         out[i] = in[i];
808         continue;
809       }
810       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
811       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
812     }
813   }
814   PetscFunctionReturn(0);
815 }
816 
817 /*@
818    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
819 
820    Not collective
821 
822    Input Parameters:
823 +  mapping - the local to global mapping context
824 .  N - number of integers
825 -  in - input indices in local block numbering
826 
827    Output Parameter:
828 .  out - indices in global block numbering
829 
830    Notes:
831    The in and out array parameters may be identical.
832 
833    Example:
834      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
835      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
836 
837    Level: advanced
838 
839 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
840           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
841           AOPetscToApplication(), ISGlobalToLocalMappingApply()
842 
843 @*/
844 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
845 {
846   PetscInt       i,Nmax = mapping->n;
847   const PetscInt *idx = mapping->indices;
848 
849   PetscFunctionBegin;
850   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
851   for (i=0; i<N; i++) {
852     if (in[i] < 0) {
853       out[i] = in[i];
854       continue;
855     }
856     PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
857     out[i] = idx[in[i]];
858   }
859   PetscFunctionReturn(0);
860 }
861 
862 /*@
863     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
864     specified with a global numbering.
865 
866     Not collective
867 
868     Input Parameters:
869 +   mapping - mapping between local and global numbering
870 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
871            IS_GTOLM_DROP - drops the indices with no local value from the output list
872 .   n - number of global indices to map
873 -   idx - global indices to map
874 
875     Output Parameters:
876 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
877 -   idxout - local index of each global index, one must pass in an array long enough
878              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
879              idxout == NULL to determine the required length (returned in nout)
880              and then allocate the required space and call ISGlobalToLocalMappingApply()
881              a second time to set the values.
882 
883     Notes:
884     Either nout or idxout may be NULL. idx and idxout may be identical.
885 
886     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
887     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
888     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
889 
890     Level: advanced
891 
892     Developer Note: The manual page states that idx and idxout may be identical but the calling
893        sequence declares idx as const so it cannot be the same as idxout.
894 
895 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
896           ISLocalToGlobalMappingDestroy()
897 @*/
898 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
899 {
900   PetscErrorCode ierr;
901 
902   PetscFunctionBegin;
903   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
904   if (!mapping->data) {
905     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
906   }
907   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
908   PetscFunctionReturn(0);
909 }
910 
911 /*@
912     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
913     a new index set using the local numbering defined in an ISLocalToGlobalMapping
914     context.
915 
916     Not collective
917 
918     Input Parameters:
919 +   mapping - mapping between local and global numbering
920 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
921            IS_GTOLM_DROP - drops the indices with no local value from the output list
922 -   is - index set in global numbering
923 
924     Output Parameters:
925 .   newis - index set in local numbering
926 
927     Notes:
928     The output IS will be sequential, as it encodes a purely local operation
929 
930     Level: advanced
931 
932 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
933           ISLocalToGlobalMappingDestroy()
934 @*/
935 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
936 {
937   PetscErrorCode ierr;
938   PetscInt       n,nout,*idxout;
939   const PetscInt *idxin;
940 
941   PetscFunctionBegin;
942   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
943   PetscValidHeaderSpecific(is,IS_CLASSID,3);
944   PetscValidPointer(newis,4);
945 
946   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
947   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
948   if (type == IS_GTOLM_MASK) {
949     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
950   } else {
951     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
952     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
953   }
954   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
955   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
956   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
957   PetscFunctionReturn(0);
958 }
959 
960 /*@
961     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
962     specified with a block global numbering.
963 
964     Not collective
965 
966     Input Parameters:
967 +   mapping - mapping between local and global numbering
968 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
969            IS_GTOLM_DROP - drops the indices with no local value from the output list
970 .   n - number of global indices to map
971 -   idx - global indices to map
972 
973     Output Parameters:
974 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
975 -   idxout - local index of each global index, one must pass in an array long enough
976              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
977              idxout == NULL to determine the required length (returned in nout)
978              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
979              a second time to set the values.
980 
981     Notes:
982     Either nout or idxout may be NULL. idx and idxout may be identical.
983 
984     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
985     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
986     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
987 
988     Level: advanced
989 
990     Developer Note: The manual page states that idx and idxout may be identical but the calling
991        sequence declares idx as const so it cannot be the same as idxout.
992 
993 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
994           ISLocalToGlobalMappingDestroy()
995 @*/
996 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
997                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
998 {
999   PetscErrorCode ierr;
1000 
1001   PetscFunctionBegin;
1002   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1003   if (!mapping->data) {
1004     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
1005   }
1006   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
1007   PetscFunctionReturn(0);
1008 }
1009 
1010 /*@C
1011     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1012      each index shared by more than one processor
1013 
1014     Collective on ISLocalToGlobalMapping
1015 
1016     Input Parameter:
1017 .   mapping - the mapping from local to global indexing
1018 
1019     Output Parameters:
1020 +   nproc - number of processors that are connected to this one
1021 .   proc - neighboring processors
1022 .   numproc - number of indices for each subdomain (processor)
1023 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1024 
1025     Level: advanced
1026 
1027     Fortran Usage:
1028 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1029 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1030           PetscInt indices[nproc][numprocmax],ierr)
1031         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1032         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1033 
1034 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1035           ISLocalToGlobalMappingRestoreInfo()
1036 @*/
1037 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1038 {
1039   PetscErrorCode ierr;
1040 
1041   PetscFunctionBegin;
1042   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1043   if (mapping->info_cached) {
1044     *nproc    = mapping->info_nproc;
1045     *procs    = mapping->info_procs;
1046     *numprocs = mapping->info_numprocs;
1047     *indices  = mapping->info_indices;
1048   } else {
1049     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1050   }
1051   PetscFunctionReturn(0);
1052 }
1053 
/*
   Computes, for each neighboring rank, the local (block) indices shared with it,
   using a three-phase MPI rendezvous:
     1. each rank sends its local indices to "global owner" ranks (indices are
        partitioned across ranks in contiguous chunks of size `scale`),
     2. the owners tally which ranks share each index and send the sharer lists back,
     3. each rank assembles per-neighbor lists of shared local indices.
   Results are cached on the mapping (info_cached) for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* uniprocessor case: no neighbors; still allocate one-element arrays so
       callers (and the Restore routines) can treat the output uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* three distinct tags for the three communication rounds */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = (global) number of indices = max index over all ranks, plus one */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRMPI(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* each rank "globally owns" the contiguous index range [rstart, rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = #indices owned by rank p; nprocs[2*p+1] = 1 if we message rank p */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length */
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  /* each entry is a (global index, local position) pair, hence the factor of 2 */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[] since the packing loop above advanced it */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;  /* number of (index,position) pairs received */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced while filling ownedsenders; rebuild it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* rebuild starts[] once more; it was advanced again above */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* prefix-sum the lengths to get receive buffer offsets; nt = total length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here: nprocs[p] = #shared indices with rank p */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[p] = compressed position of rank p in the output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1461 
1462 /*@C
1463     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1464 
1465     Collective on ISLocalToGlobalMapping
1466 
1467     Input Parameter:
1468 .   mapping - the mapping from local to global indexing
1469 
1470     Output Parameters:
1471 +   nproc - number of processors that are connected to this one
1472 .   proc - neighboring processors
1473 .   numproc - number of indices for each processor
1474 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1475 
1476     Level: advanced
1477 
1478 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1479           ISLocalToGlobalMappingGetInfo()
1480 @*/
1481 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1482 {
1483   PetscErrorCode ierr;
1484 
1485   PetscFunctionBegin;
1486   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1487   if (mapping->info_free) {
1488     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1489     if (*indices) {
1490       PetscInt i;
1491 
1492       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1493       for (i=1; i<*nproc; i++) {
1494         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1495       }
1496       ierr = PetscFree(*indices);CHKERRQ(ierr);
1497     }
1498   }
1499   *nproc    = 0;
1500   *procs    = NULL;
1501   *numprocs = NULL;
1502   *indices  = NULL;
1503   PetscFunctionReturn(0);
1504 }
1505 
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo() when the data is no longer needed.

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* the block-level info is computed (and cached) by the block variant; the point-level
     view is derived from it below */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      /* each shared block index expands to bs consecutive point indices */
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* mark that the expanded arrays (not the cached block info) were freshly allocated;
       presumably ISLocalToGlobalMappingRestoreBlockInfo() consults this flag to free them
       -- confirm against its implementation */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: block info and point info coincide, return the cached arrays directly */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}
1562 
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Input/Output Parameters:
+   nproc - number of processors that are connected to this one, as obtained from ISLocalToGlobalMappingGetInfo()
.   proc - neighboring processors, as obtained from ISLocalToGlobalMappingGetInfo()
.   numproc - number of indices for each processor, as obtained from ISLocalToGlobalMappingGetInfo()
-   indices - indices of local nodes shared with neighbor (sorted by global numbering), as obtained from ISLocalToGlobalMappingGetInfo()

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* delegates entirely to the block variant; it is responsible for freeing either the
     cached block arrays or the bs-expanded copies made by ISLocalToGlobalMappingGetInfo() */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1590 
/*@C
    ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes (same as ISLocalToGlobalMappingGetSize())
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
    The results are cached on the mapping and computed only on the first call.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) { /* not yet cached: build a CSR-like node -> sharing-processes table */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec has n+1 entries (counts), info_nodei holds n row pointers into one buffer */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count entries per node; every node gets one slot for neigh[0],
       which appears to be this process itself -- NOTE(review): confirm against
       ISLocalToGlobalMappingGetBlockInfo(), whose neighbor list starts at self */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;                       /* m accumulates the total buffer length */
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {    /* skip entry 0 (self), counted above for every node */
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation for all rows; row pointers are offsets by the counts */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: counts now double as per-row fill positions, so reset and refill */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each row and drop duplicates; this also fixes up the final counts */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1655 
1656 /*@C
1657     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1658 
1659     Collective on ISLocalToGlobalMapping
1660 
1661     Input Parameter:
1662 .   mapping - the mapping from local to global indexing
1663 
1664     Output Parameters:
1665 +   nnodes - number of local nodes
1666 .   count - number of neighboring processors per node
1667 -   indices - indices of processes sharing the node (sorted)
1668 
1669     Level: advanced
1670 
1671 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1672           ISLocalToGlobalMappingGetInfo()
1673 @*/
1674 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1675 {
1676   PetscFunctionBegin;
1677   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1678   if (nnodes)  *nnodes  = 0;
1679   if (count)   *count   = NULL;
1680   if (indices) *indices = NULL;
1681   PetscFunctionReturn(0);
1682 }
1683 
1684 /*@C
1685    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1686 
1687    Not Collective
1688 
1689    Input Parameter:
1690 . ltog - local to global mapping
1691 
1692    Output Parameter:
1693 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1694 
1695    Level: advanced
1696 
1697    Notes:
1698     ISLocalToGlobalMappingGetSize() returns the length the this array
1699 
1700 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1701 @*/
1702 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1703 {
1704   PetscFunctionBegin;
1705   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1706   PetscValidPointer(array,2);
1707   if (ltog->bs == 1) {
1708     *array = ltog->indices;
1709   } else {
1710     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1711     const PetscInt *ii;
1712     PetscErrorCode ierr;
1713 
1714     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1715     *array = jj;
1716     k    = 0;
1717     ii   = ltog->indices;
1718     for (i=0; i<n; i++)
1719       for (j=0; j<bs; j++)
1720         jj[k++] = bs*ii[i] + j;
1721   }
1722   PetscFunctionReturn(0);
1723 }
1724 
1725 /*@C
1726    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1727 
1728    Not Collective
1729 
1730    Input Parameters:
1731 + ltog - local to global mapping
1732 - array - array of indices
1733 
1734    Level: advanced
1735 
1736 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1737 @*/
1738 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1739 {
1740   PetscFunctionBegin;
1741   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1742   PetscValidPointer(array,2);
1743   PetscCheckFalse(ltog->bs == 1 && *array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1744 
1745   if (ltog->bs > 1) {
1746     PetscErrorCode ierr;
1747     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1748   }
1749   PetscFunctionReturn(0);
1750 }
1751 
1752 /*@C
1753    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1754 
1755    Not Collective
1756 
1757    Input Parameter:
1758 . ltog - local to global mapping
1759 
1760    Output Parameter:
1761 . array - array of indices
1762 
1763    Level: advanced
1764 
1765 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1766 @*/
1767 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1768 {
1769   PetscFunctionBegin;
1770   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1771   PetscValidPointer(array,2);
1772   *array = ltog->indices;
1773   PetscFunctionReturn(0);
1774 }
1775 
1776 /*@C
1777    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1778 
1779    Not Collective
1780 
1781    Input Parameters:
1782 + ltog - local to global mapping
1783 - array - array of indices
1784 
1785    Level: advanced
1786 
1787 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1788 @*/
1789 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1790 {
1791   PetscFunctionBegin;
1792   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1793   PetscValidPointer(array,2);
1794   PetscCheckFalse(*array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1795   *array = NULL;
1796   PetscFunctionReturn(0);
1797 }
1798 
1799 /*@C
1800    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1801 
1802    Not Collective
1803 
1804    Input Parameters:
1805 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1806 . n - number of mappings to concatenate
1807 - ltogs - local to global mappings
1808 
1809    Output Parameter:
1810 . ltogcat - new mapping
1811 
1812    Note: this currently always returns a mapping with block size of 1
1813 
1814    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1815 
1816    Level: advanced
1817 
1818 .seealso: ISLocalToGlobalMappingCreate()
1819 @*/
1820 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1821 {
1822   PetscInt       i,cnt,m,*idx;
1823   PetscErrorCode ierr;
1824 
1825   PetscFunctionBegin;
1826   PetscCheckFalse(n < 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1827   if (n > 0) PetscValidPointer(ltogs,3);
1828   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1829   PetscValidPointer(ltogcat,4);
1830   for (cnt=0,i=0; i<n; i++) {
1831     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1832     cnt += m;
1833   }
1834   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1835   for (cnt=0,i=0; i<n; i++) {
1836     const PetscInt *subidx;
1837     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1838     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1839     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1840     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1841     cnt += m;
1842   }
1843   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1844   PetscFunctionReturn(0);
1845 }
1846 
1847 /*MC
1848       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1849                                     used this is good for only small and moderate size problems.
1850 
1851    Options Database Keys:
1852 .   -islocaltoglobalmapping_type basic - select this method
1853 
1854    Level: beginner
1855 
1856 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1857 M*/
1858 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1859 {
1860   PetscFunctionBegin;
1861   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1862   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1863   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1864   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1865   PetscFunctionReturn(0);
1866 }
1867 
1868 /*MC
1869       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1870                                     used this is good for large memory problems.
1871 
1872    Options Database Keys:
1873 .   -islocaltoglobalmapping_type hash - select this method
1874 
1875    Notes:
1876     This is selected automatically for large problems if the user does not set the type.
1877 
1878    Level: beginner
1879 
1880 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1881 M*/
1882 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1883 {
1884   PetscFunctionBegin;
1885   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1886   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1887   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1888   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1889   PetscFunctionReturn(0);
1890 }
1891 
1892 /*@C
1893     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1894 
1895    Not Collective
1896 
1897    Input Parameters:
1898 +  sname - name of a new method
1899 -  routine_create - routine to create method context
1900 
1901    Notes:
1902    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1903 
1904    Sample usage:
1905 .vb
1906    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1907 .ve
1908 
1909    Then, your mapping can be chosen with the procedural interface via
1910 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1911    or at runtime via the option
1912 $     -islocaltoglobalmapping_type my_mapper
1913 
1914    Level: advanced
1915 
1916 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1917 
1918 @*/
1919 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1920 {
1921   PetscErrorCode ierr;
1922 
1923   PetscFunctionBegin;
1924   ierr = ISInitializePackage();CHKERRQ(ierr);
1925   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1926   PetscFunctionReturn(0);
1927 }
1928 
1929 /*@C
1930    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1931 
1932    Logically Collective on ISLocalToGlobalMapping
1933 
1934    Input Parameters:
1935 +  ltog - the ISLocalToGlobalMapping object
1936 -  type - a known method
1937 
1938    Options Database Key:
1939 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1940     of available methods (for instance, basic or hash)
1941 
1942    Notes:
1943    See "petsc/include/petscis.h" for available methods
1944 
1945   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1946   then set the ISLocalToGlobalMapping type from the options database rather than by using
1947   this routine.
1948 
1949   Level: intermediate
1950 
1951   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1952   are accessed by ISLocalToGlobalMappingSetType().
1953 
1954 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingGetType()
1955 @*/
1956 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1957 {
1958   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping) = NULL;
1959   PetscBool      match;
1960 
1961   PetscFunctionBegin;
1962   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1963   if (type) PetscValidCharPointer(type,2);
1964 
1965   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1966   if (match) PetscFunctionReturn(0);
1967 
1968   /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1969   if (type) {
1970     ierr = PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1971     PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1972   }
1973   /* Destroy the previous private LTOG context */
1974   if (ltog->ops->destroy) {
1975     ierr = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1976     ltog->ops->destroy = NULL;
1977   }
1978   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1979   if (r) { ierr = (*r)(ltog);CHKERRQ(ierr); }
1980   PetscFunctionReturn(0);
1981 }
1982 
/*@C
   ISLocalToGlobalMappingGetType - Get the type of the l2g map

   Not Collective

   Input Parameter:
.  ltog - the ISLocalToGlobalMapping object

   Output Parameter:
.  type - the type

   Level: intermediate

.seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(type,2);
  /* returns the stored type name; may be NULL if the type was never set */
  *type = ((PetscObject)ltog)->type_name;
  PetscFunctionReturn(0);
}
2004 
/* guards ISLocalToGlobalMappingRegisterAll() so the built-in types are registered only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
2006 
/*@C
  ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

  Not Collective

  Level: advanced

.seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
@*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* idempotent: registering the same constructors twice would be an error */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
  /* the two built-in implementations */
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2027