xref: /petsc/src/vec/is/utils/isltog.c (revision 91e63d38360eb9bc922f79d792328cc4769c01ac)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* BASIC implementation data: a dense lookup table covering [globalstart, globalend],
   where globals[g - globalstart] is the local index for global index g, or -1 if absent */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* HASH implementation data: a hash map from global index to local index,
   scalable when the global range is much larger than the number of local indices */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 /*@C
19   ISGetPointRange - Returns a description of the points in an IS suitable for traversal
20 
21   Not collective
22 
23   Input Parameter:
24 . pointIS - The IS object
25 
26   Output Parameters:
27 + pStart - The first index, see notes
28 . pEnd   - One past the last index, see notes
29 - points - The indices, see notes
30 
31   Notes:
32   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
33 $ ISGetPointRange(is, &pStart, &pEnd, &points);
34 $ for (p = pStart; p < pEnd; ++p) {
35 $   const PetscInt point = points ? points[p] : p;
36 $ }
37 $ ISRestorePointRange(is, &pStart, &pEnd, &points);
38 
39   Level: intermediate
40 
41 .seealso: ISRestorePointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
42 @*/
43 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
44 {
45   PetscInt       numCells, step = 1;
46   PetscBool      isStride;
47   PetscErrorCode ierr;
48 
49   PetscFunctionBeginHot;
50   *pStart = 0;
51   *points = NULL;
52   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
53   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
54   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
55   *pEnd   = *pStart + numCells;
56   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
57   PetscFunctionReturn(0);
58 }
59 
60 /*@C
61   ISRestorePointRange - Destroys the traversal description
62 
63   Not collective
64 
65   Input Parameters:
66 + pointIS - The IS object
67 . pStart  - The first index, from ISGetPointRange()
68 . pEnd    - One past the last index, from ISGetPointRange()
69 - points  - The indices, from ISGetPointRange()
70 
71   Notes:
72   If the IS contains contiguous indices in an ISSTRIDE, then the indices are contained in [pStart, pEnd) and points = NULL. Otherwise, pStart = 0, pEnd = numIndices, and points is an array of the indices. This supports the following pattern
73 $ ISGetPointRange(is, &pStart, &pEnd, &points);
74 $ for (p = pStart; p < pEnd; ++p) {
75 $   const PetscInt point = points ? points[p] : p;
76 $ }
77 $ ISRestorePointRange(is, &pStart, &pEnd, &points);
78 
79   Level: intermediate
80 
81 .seealso: ISGetPointRange(), ISGetPointSubrange(), ISGetIndices(), ISCreateStride()
82 @*/
83 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
84 {
85   PetscInt       step = 1;
86   PetscBool      isStride;
87   PetscErrorCode ierr;
88 
89   PetscFunctionBeginHot;
90   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
91   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
92   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
93   PetscFunctionReturn(0);
94 }
95 
96 /*@C
97   ISGetPointSubrange - Configures the input IS to be a subrange for the traversal information given
98 
99   Not collective
100 
101   Input Parameters:
102 + subpointIS - The IS object to be configured
103 . pStart     - The first index of the subrange
104 . pEnd       - One past the last index for the subrange
105 - points     - The indices for the entire range, from ISGetPointRange()
106 
107   Output Parameters:
108 . subpointIS - The IS object now configured to be a subrange
109 
110   Notes:
111   The input IS will now respond properly to calls to ISGetPointRange() and return the subrange.
112 
113   Level: intermediate
114 
115 .seealso: ISGetPointRange(), ISRestorePointRange(), ISGetIndices(), ISCreateStride()
116 @*/
117 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
118 {
119   PetscErrorCode ierr;
120 
121   PetscFunctionBeginHot;
122   if (points) {
123     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
124     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
125   } else {
126     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
127     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
128   }
129   PetscFunctionReturn(0);
130 }
131 
132 /* -----------------------------------------------------------------------------------------*/
133 
134 /*
135     Creates the global mapping information in the ISLocalToGlobalMapping structure
136 
137     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
138 */
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0);  /* already set up */
  end   = 0;
  start = PETSC_MAX_INT;

  /* Find the smallest and largest nonnegative global index; negative entries
     mark "no global value" and are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* No nonnegative indices at all: record an empty range */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;
  /* If the user did not choose a type, pick HASH when the global range is much
     larger than the local count (BASIC's dense table would waste memory),
     otherwise the faster BASIC dense-table implementation */
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4*n,1000000)) {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
    } else {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
    }
  }
  /* Let the selected implementation build its lookup structure */
  if (mapping->ops->globaltolocalmappingsetup) { ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr); }
  PetscFunctionReturn(0);
}
167 
/* Build the dense global-to-local lookup table covering [globalstart, globalend] */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  ierr             = PetscNew(&map);CHKERRQ(ierr);
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  map->globals     = globals;
  /* -1 marks global indices with no local counterpart */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;               /* negative entries have no global value */
    globals[idx[i] - start] = i;            /* record local index for this global index */
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
189 
/* Build the global-to-local hash table; memory scales with n, not the global range */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  ierr = PetscNew(&map);CHKERRQ(ierr);
  ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;                              /* negative entries have no global value */
    ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);  /* global -> local */
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
207 
208 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
209 {
210   PetscErrorCode              ierr;
211   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
212 
213   PetscFunctionBegin;
214   if (!map) PetscFunctionReturn(0);
215   ierr = PetscFree(map->globals);CHKERRQ(ierr);
216   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
217   PetscFunctionReturn(0);
218 }
219 
220 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
221 {
222   PetscErrorCode              ierr;
223   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
224 
225   PetscFunctionBegin;
226   if (!map) PetscFunctionReturn(0);
227   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
228   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
/*
   Instantiate the ISGlobalToLocalMappingApply* implementations from the template
   in isltog.h.  Before each inclusion we define:
     GTOLTYPE - suffix of the implementation's data structure (_Basic or _Hash)
     GTOLNAME - suffix appended to the generated function names
     GTOLBS   - block size the generated code works with (mapping->bs, or 1 for
                the Block variants which operate on whole blocks)
     GTOL(g, local) - lookup translating global index g to a local index
   The template header consumes (and resets) these macros on each inclusion.
*/

/* Point-wise lookup via the dense table */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Block-wise lookup via the dense table */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Point-wise lookup via the hash table; a missed lookup leaves local negative */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Block-wise lookup via the hash table */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
266 
267 /*@
268     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
269 
270     Not Collective
271 
272     Input Parameter:
273 .   ltog - local to global mapping
274 
275     Output Parameter:
276 .   nltog - the duplicated local to global mapping
277 
278     Level: advanced
279 
280 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
281 @*/
282 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
283 {
284   PetscErrorCode             ierr;
285   ISLocalToGlobalMappingType l2gtype;
286 
287   PetscFunctionBegin;
288   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
289   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
290   ierr = ISLocalToGlobalMappingGetType(ltog,&l2gtype);CHKERRQ(ierr);
291   ierr = ISLocalToGlobalMappingSetType(*nltog,l2gtype);CHKERRQ(ierr);
292   PetscFunctionReturn(0);
293 }
294 
295 /*@
296     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
297 
298     Not Collective
299 
300     Input Parameter:
301 .   ltog - local to global mapping
302 
303     Output Parameter:
304 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
305 
306     Level: advanced
307 
308 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
309 @*/
310 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
311 {
312   PetscFunctionBegin;
313   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
314   PetscValidIntPointer(n,2);
315   *n = mapping->bs*mapping->n;
316   PetscFunctionReturn(0);
317 }
318 
319 /*@C
320    ISLocalToGlobalMappingViewFromOptions - View from Options
321 
322    Collective on ISLocalToGlobalMapping
323 
324    Input Parameters:
325 +  A - the local to global mapping object
326 .  obj - Optional object
327 -  name - command line option
328 
329    Level: intermediate
330 .seealso:  ISLocalToGlobalMapping, ISLocalToGlobalMappingView, PetscObjectViewFromOptions(), ISLocalToGlobalMappingCreate()
331 @*/
332 PetscErrorCode  ISLocalToGlobalMappingViewFromOptions(ISLocalToGlobalMapping A,PetscObject obj,const char name[])
333 {
334   PetscErrorCode ierr;
335 
336   PetscFunctionBegin;
337   PetscValidHeaderSpecific(A,IS_LTOGM_CLASSID,1);
338   ierr = PetscObjectViewFromOptions((PetscObject)A,obj,name);CHKERRQ(ierr);
339   PetscFunctionReturn(0);
340 }
341 
342 /*@C
343     ISLocalToGlobalMappingView - View a local to global mapping
344 
345     Not Collective
346 
347     Input Parameters:
348 +   ltog - local to global mapping
349 -   viewer - viewer
350 
351     Level: advanced
352 
353 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
354 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Default to the stdout viewer on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRMPI(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  /* Only ASCII viewers are supported; other viewer types silently do nothing */
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* Synchronized printing so each rank's "[rank] local global" lines appear in rank order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %" PetscInt_FMT " %" PetscInt_FMT "\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
382 
383 /*@
384     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
385     ordering and a global parallel ordering.
386 
387     Not collective
388 
389     Input Parameter:
390 .   is - index set containing the global numbers for each local number
391 
392     Output Parameter:
393 .   mapping - new mapping data structure
394 
395     Notes:
396     the block size of the IS determines the block size of the mapping
397     Level: advanced
398 
399 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
400 @*/
401 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
402 {
403   PetscErrorCode ierr;
404   PetscInt       n,bs;
405   const PetscInt *indices;
406   MPI_Comm       comm;
407   PetscBool      isblock;
408 
409   PetscFunctionBegin;
410   PetscValidHeaderSpecific(is,IS_CLASSID,1);
411   PetscValidPointer(mapping,2);
412 
413   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
414   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
415   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
416   if (!isblock) {
417     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
418     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
419     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
420   } else {
421     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
422     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
423     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
424     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
425   }
426   PetscFunctionReturn(0);
427 }
428 
429 /*@C
430     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
431     ordering and a global parallel ordering.
432 
433     Collective
434 
435     Input Parameters:
436 +   sf - star forest mapping contiguous local indices to (rank, offset)
437 -   start - first global index on this process, or PETSC_DECIDE to compute contiguous global numbering automatically
438 
439     Output Parameter:
440 .   mapping - new mapping data structure
441 
442     Level: advanced
443 
444     Notes:
445     If any processor calls this with start = PETSC_DECIDE then all processors must, otherwise the program will hang.
446 
447 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
448 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (start == PETSC_DECIDE) {
    /* Contiguous global numbering: this rank starts after the roots of all lower ranks.
       Collective (Exscan), hence the documented requirement that all ranks pass PETSC_DECIDE together. */
    start = 0;
    ierr = MPI_Exscan(&nroots,&start,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  } else PetscCheckFalse(start < 0,comm, PETSC_ERR_ARG_OUTOFRANGE, "start must be nonnegative or PETSC_DECIDE");
  /* Local space must cover the largest leaf location; NULL ilocal means leaves are 0..nleaves-1 */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;
  for (i=0; i<maxlocal; i++) ltog[i] = -1;   /* positions not reached by a leaf stay unmapped */
  /* Broadcast each root's global number to the leaves that reference it */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog,MPI_REPLACE);CHKERRQ(ierr);
  /* The mapping takes ownership of ltog (PETSC_OWN_POINTER); globals is temporary */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
480 
481 /*@
482     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
483 
484     Not collective
485 
486     Input Parameters:
487 +   mapping - mapping data structure
488 -   bs - the blocksize
489 
490     Level: advanced
491 
492 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
493 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscCheckFalse(bs < 1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %" PetscInt_FMT,bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);  /* nothing to do */
  on  = mapping->n;    /* old number of blocks */
  obs = mapping->bs;   /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;   /* new number of blocks: total entries must split evenly */
  PetscCheckFalse((on*obs)%bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %" PetscInt_FMT " is inconsistent with block size %" PetscInt_FMT " and number of block indices %" PetscInt_FMT,bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* GetIndices expands the blocked storage to one entry per point */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Each new block must consist of bs consecutive global indices, or be entirely negative */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      PetscCheckFalse(oid[i*bs+j] != oid[i*bs+j+1]-1,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: non consecutive indices %" PetscInt_FMT " %" PetscInt_FMT,bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;  /* account for the last entry of the block */
    if (cn) {
      /* Mixed negative/nonnegative entries within one block cannot be represented */
      PetscCheckFalse(cn != bs,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %" PetscInt_FMT " and %" PetscInt_FMT " are incompatible with the block indices: invalid number of negative entries in block %" PetscInt_FMT,bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;  /* new block index in the re-blocked numbering */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the single allocation backing the first entry; the rest were allocated individually */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard implementation data built for the old block size; it will be rebuilt lazily */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
555 
556 /*@
557     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
558     ordering and a global parallel ordering.
559 
560     Not Collective
561 
562     Input Parameters:
563 .   mapping - mapping data structure
564 
565     Output Parameter:
566 .   bs - the blocksize
567 
568     Level: advanced
569 
570 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
571 @*/
572 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
573 {
574   PetscFunctionBegin;
575   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
576   *bs = mapping->bs;
577   PetscFunctionReturn(0);
578 }
579 
580 /*@
581     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
582     ordering and a global parallel ordering.
583 
584     Not Collective, but communicator may have more than one process
585 
586     Input Parameters:
587 +   comm - MPI communicator
588 .   bs - the block size
589 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
590 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
591 -   mode - see PetscCopyMode
592 
593     Output Parameter:
594 .   mapping - new mapping data structure
595 
596     Notes:
597     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
598 
599     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
600     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
601     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
602 
603     Level: advanced
604 
605 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
606           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
607 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,4);  /* indices may be NULL only when there are no entries */
  PetscValidPointer(mapping,6);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  if (mode == PETSC_COPY_VALUES) {
    /* Private copy of the indices; the mapping frees it at destroy time */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
    (*mapping)->indices = in;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* Take ownership of the caller's array; the mapping frees it at destroy time */
    (*mapping)->indices = (PetscInt*)indices;
    (*mapping)->dealloc_indices = PETSC_TRUE;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_USE_POINTER) {
    /* Borrow the caller's array; the caller keeps ownership and must keep it alive */
    (*mapping)->indices = (PetscInt*)indices;
  }
  else SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Invalid mode %d", mode);
  PetscFunctionReturn(0);
}
639 
640 PetscFunctionList ISLocalToGlobalMappingList = NULL;
641 
642 /*@
643    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
644 
645    Not collective
646 
647    Input Parameters:
648 .  mapping - mapping data structure
649 
650    Level: advanced
651 
652 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Make sure all implementations are registered before listing them as choices */
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* Offer the current type as the default when one has already been set */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
671 
672 /*@
673    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
674    ordering and a global parallel ordering.
675 
676    Not Collective
677 
678    Input Parameters:
679 .  mapping - mapping data structure
680 
681    Level: advanced
682 
683 .seealso: ISLocalToGlobalMappingCreate()
684 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Drop one reference; only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = NULL;PetscFunctionReturn(0);}
  /* Indices are freed only when the mapping owns them (PETSC_COPY_VALUES/PETSC_OWN_POINTER) */
  if ((*mapping)->dealloc_indices) {
    ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  }
  /* Release the cached neighbor information built by the GetInfo routines */
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* info_indices[0] backs a single allocation; the remaining entries were allocated per process */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* Let the implementation (basic/hash) free its private data before the header goes away */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = NULL;
  PetscFunctionReturn(0);
}
718 
719 /*@
720     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
721     a new index set using the global numbering defined in an ISLocalToGlobalMapping
722     context.
723 
724     Collective on is
725 
726     Input Parameters:
727 +   mapping - mapping between local and global numbering
728 -   is - index set in local numbering
729 
730     Output Parameters:
731 .   newis - index set in global numbering
732 
733     Notes:
734     The output IS will have the same communicator of the input IS.
735 
736     Level: advanced
737 
738 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
739           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
740 @*/
741 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
742 {
743   PetscErrorCode ierr;
744   PetscInt       n,*idxout;
745   const PetscInt *idxin;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
749   PetscValidHeaderSpecific(is,IS_CLASSID,2);
750   PetscValidPointer(newis,3);
751 
752   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
753   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
754   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
755   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
756   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
757   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
758   PetscFunctionReturn(0);
759 }
760 
761 /*@
762    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
763    and converts them to the global numbering.
764 
765    Not collective
766 
767    Input Parameters:
768 +  mapping - the local to global mapping context
769 .  N - number of integers
770 -  in - input indices in local numbering
771 
772    Output Parameter:
773 .  out - indices in global numbering
774 
775    Notes:
776    The in and out array parameters may be identical.
777 
778    Level: advanced
779 
780 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
781           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
782           AOPetscToApplication(), ISGlobalToLocalMappingApply()
783 
784 @*/
785 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
786 {
787   PetscInt i,bs,Nmax;
788 
789   PetscFunctionBegin;
790   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
791   bs   = mapping->bs;
792   Nmax = bs*mapping->n;
793   if (bs == 1) {
794     const PetscInt *idx = mapping->indices;
795     for (i=0; i<N; i++) {
796       if (in[i] < 0) {
797         out[i] = in[i];
798         continue;
799       }
800       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
801       out[i] = idx[in[i]];
802     }
803   } else {
804     const PetscInt *idx = mapping->indices;
805     for (i=0; i<N; i++) {
806       if (in[i] < 0) {
807         out[i] = in[i];
808         continue;
809       }
810       PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
811       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
812     }
813   }
814   PetscFunctionReturn(0);
815 }
816 
817 /*@
818    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
819 
820    Not collective
821 
822    Input Parameters:
823 +  mapping - the local to global mapping context
824 .  N - number of integers
825 -  in - input indices in local block numbering
826 
827    Output Parameter:
828 .  out - indices in global block numbering
829 
830    Notes:
831    The in and out array parameters may be identical.
832 
833    Example:
834      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
835      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
836 
837    Level: advanced
838 
839 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
840           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
841           AOPetscToApplication(), ISGlobalToLocalMappingApply()
842 
843 @*/
844 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
845 {
846   PetscInt       i,Nmax;
847   const PetscInt *idx;
848 
849   PetscFunctionBegin;
850   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
851   Nmax = mapping->n;
852   idx = mapping->indices;
853   for (i=0; i<N; i++) {
854     if (in[i] < 0) {
855       out[i] = in[i];
856       continue;
857     }
858     PetscCheckFalse(in[i] >= Nmax,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %" PetscInt_FMT " too large %" PetscInt_FMT " (max) at %" PetscInt_FMT,in[i],Nmax-1,i);
859     out[i] = idx[in[i]];
860   }
861   PetscFunctionReturn(0);
862 }
863 
864 /*@
865     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
866     specified with a global numbering.
867 
868     Not collective
869 
870     Input Parameters:
871 +   mapping - mapping between local and global numbering
872 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
873            IS_GTOLM_DROP - drops the indices with no local value from the output list
874 .   n - number of global indices to map
875 -   idx - global indices to map
876 
877     Output Parameters:
878 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
879 -   idxout - local index of each global index, one must pass in an array long enough
880              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
881              idxout == NULL to determine the required length (returned in nout)
882              and then allocate the required space and call ISGlobalToLocalMappingApply()
883              a second time to set the values.
884 
885     Notes:
886     Either nout or idxout may be NULL. idx and idxout may be identical.
887 
888     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
889     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
890     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
891 
892     Level: advanced
893 
894     Developer Note: The manual page states that idx and idxout may be identical but the calling
895        sequence declares idx as const so it cannot be the same as idxout.
896 
897 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
898           ISLocalToGlobalMappingDestroy()
899 @*/
900 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
901 {
902   PetscErrorCode ierr;
903 
904   PetscFunctionBegin;
905   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
906   if (!mapping->data) {
907     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
908   }
909   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
910   PetscFunctionReturn(0);
911 }
912 
913 /*@
914     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
915     a new index set using the local numbering defined in an ISLocalToGlobalMapping
916     context.
917 
918     Not collective
919 
920     Input Parameters:
921 +   mapping - mapping between local and global numbering
922 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
923            IS_GTOLM_DROP - drops the indices with no local value from the output list
924 -   is - index set in global numbering
925 
    Output Parameter:
.   newis - index set in local numbering
928 
929     Notes:
930     The output IS will be sequential, as it encodes a purely local operation
931 
932     Level: advanced
933 
934 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
935           ISLocalToGlobalMappingDestroy()
936 @*/
937 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
938 {
939   PetscErrorCode ierr;
940   PetscInt       n,nout,*idxout;
941   const PetscInt *idxin;
942 
943   PetscFunctionBegin;
944   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
945   PetscValidHeaderSpecific(is,IS_CLASSID,3);
946   PetscValidPointer(newis,4);
947 
948   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
949   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
950   if (type == IS_GTOLM_MASK) {
951     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
952   } else {
953     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
954     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
955   }
956   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
957   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
958   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
959   PetscFunctionReturn(0);
960 }
961 
962 /*@
963     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
964     specified with a block global numbering.
965 
966     Not collective
967 
968     Input Parameters:
969 +   mapping - mapping between local and global numbering
970 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
971            IS_GTOLM_DROP - drops the indices with no local value from the output list
972 .   n - number of global indices to map
973 -   idx - global indices to map
974 
975     Output Parameters:
976 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
977 -   idxout - local index of each global index, one must pass in an array long enough
978              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
979              idxout == NULL to determine the required length (returned in nout)
980              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
981              a second time to set the values.
982 
983     Notes:
984     Either nout or idxout may be NULL. idx and idxout may be identical.
985 
986     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
987     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
988     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
989 
990     Level: advanced
991 
992     Developer Note: The manual page states that idx and idxout may be identical but the calling
993        sequence declares idx as const so it cannot be the same as idxout.
994 
995 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
996           ISLocalToGlobalMappingDestroy()
997 @*/
998 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
999                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
1000 {
1001   PetscErrorCode ierr;
1002 
1003   PetscFunctionBegin;
1004   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1005   if (!mapping->data) {
1006     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
1007   }
1008   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
1009   PetscFunctionReturn(0);
1010 }
1011 
1012 /*@C
1013     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
1014      each index shared by more than one processor
1015 
1016     Collective on ISLocalToGlobalMapping
1017 
1018     Input Parameter:
1019 .   mapping - the mapping from local to global indexing
1020 
1021     Output Parameters:
1022 +   nproc - number of processors that are connected to this one
1023 .   proc - neighboring processors
1024 .   numproc - number of indices for each subdomain (processor)
1025 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1026 
1027     Level: advanced
1028 
1029     Fortran Usage:
1030 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1031 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1032           PetscInt indices[nproc][numprocmax],ierr)
1033         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1034         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1035 
1036 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1037           ISLocalToGlobalMappingRestoreInfo()
1038 @*/
1039 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1040 {
1041   PetscErrorCode ierr;
1042 
1043   PetscFunctionBegin;
1044   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1045   if (mapping->info_cached) {
1046     *nproc    = mapping->info_nproc;
1047     *procs    = mapping->info_procs;
1048     *numprocs = mapping->info_numprocs;
1049     *indices  = mapping->info_indices;
1050   } else {
1051     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1052   }
1053   PetscFunctionReturn(0);
1054 }
1055 
/*
   Computes, via a parallel rendezvous, which other MPI ranks share each of this
   rank's local (block) indices. The global index range is partitioned evenly
   across ranks purely for this computation ("global owners"); each rank sends
   its local indices to their global owners (tag1), the owners tally and list
   the sharing ranks, message lengths go back (tag2) followed by the sharer
   lists themselves (tag3). The result is cached on the mapping for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  if (size == 1) {
    /* trivial sequential case: no neighbors, but still allocate one-entry arrays
       so that callers (and the restore routine) can treat the output uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = (global) number of indices = one more than the largest index used anywhere */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRMPI(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
  /* block-partition [0,Ng) over the ranks: rank r globally owns [scale*r, scale*r+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo(mapping,"Number of global owners for my local data %" PetscInt_FMT "\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo(mapping,"Number of local owners for my global data %" PetscInt_FMT "\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* pack messages containing lists of local nodes to owners; each entry is the
     pair (global index, local position) so the reply can be addressed later */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[], consumed by the packing loop above */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRMPI(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRMPI(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* messages are (index,position) pairs */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %" PetscInt_FMT " local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRMPI(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* prefix-sum the lengths to get receive offsets, and the total size nt */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRMPI(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRMPI(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRMPI(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %" PetscInt_FMT " number of subdomains %" PetscInt_FMT ": ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node; nprocs[] is reused here as a
     per-rank counter of shared nodes */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;  /* maps rank -> position in the compressed output arrays */
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %" PetscInt_FMT " number of indices %" PetscInt_FMT ": ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%" PetscInt_FMT " ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRMPI(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1463 
1464 /*@C
1465     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1466 
1467     Collective on ISLocalToGlobalMapping
1468 
1469     Input Parameter:
1470 .   mapping - the mapping from local to global indexing
1471 
1472     Output Parameters:
1473 +   nproc - number of processors that are connected to this one
1474 .   proc - neighboring processors
1475 .   numproc - number of indices for each processor
1476 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1477 
1478     Level: advanced
1479 
1480 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1481           ISLocalToGlobalMappingGetInfo()
1482 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_free) {
    /* info_free is set by ISLocalToGlobalMappingGetInfo() when it expanded the
       cached block info for bs > 1; those expanded arrays must be freed here.
       Otherwise the arrays are the cached copies owned by the mapping and are
       left alone. Note *procs is never freed here: it always aliases the cache. */
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      /* entry 0 is freed unconditionally since it exists even when *nproc == 0 */
      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* invalidate the caller's view regardless of who owns the storage */
  *nproc    = 0;
  *procs    = NULL;
  *numprocs = NULL;
  *indices  = NULL;
  PetscFunctionReturn(0);
}
1507 
1508 /*@C
1509     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1510      each index shared by more than one processor
1511 
1512     Collective on ISLocalToGlobalMapping
1513 
1514     Input Parameter:
1515 .   mapping - the mapping from local to global indexing
1516 
1517     Output Parameters:
1518 +   nproc - number of processors that are connected to this one
1519 .   proc - neighboring processors
1520 .   numproc - number of indices for each subdomain (processor)
1521 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1522 
1523     Level: advanced
1524 
1525     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1526 
1527     Fortran Usage:
1528 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1529 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1530           PetscInt indices[nproc][numprocmax],ierr)
1531         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1532         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1533 
1534 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1535           ISLocalToGlobalMappingRestoreInfo()
1536 @*/
PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs = mapping->bs;
  /* The neighbor info is computed (and cached) at block granularity; for bs == 1
     the block info and the point info coincide and can be returned directly */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      /* each shared block index b expands to the bs point indices b*bs .. b*bs+bs-1 */
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* mark that the returned arrays are freshly allocated (not the cached block
       info) so the restore path knows it must free them */
    mapping->info_free = PETSC_TRUE;
  } else {
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}
1565 
1566 /*@C
1567     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1568 
1569     Collective on ISLocalToGlobalMapping
1570 
1571     Input Parameter:
1572 .   mapping - the mapping from local to global indexing
1573 
1574     Output Parameters:
1575 +   nproc - number of processors that are connected to this one
1576 .   proc - neighboring processors
1577 .   numproc - number of indices for each processor
1578 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1579 
1580     Level: advanced
1581 
1582 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1583           ISLocalToGlobalMappingGetInfo()
1584 @*/
1585 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1586 {
1587   PetscErrorCode ierr;
1588 
1589   PetscFunctionBegin;
1590   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1591   PetscFunctionReturn(0);
1592 }
1593 
/*@C
    ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
    The computed data is cached on the mapping; subsequent calls return the cached arrays.

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) { /* first call: build and cache the per-node data */
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* First pass: count the processes that see each node. Every node gets one
       entry for neigh[0] (by convention the first entry of the neighbor list --
       presumably this process itself; TODO confirm against GetBlockInfo), plus
       one entry per true neighbor (indices 1..n_neigh-1) that shares it */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* Allocate a single buffer of total length m and set up CSR-style row
       pointers into it, using the counts from the first pass */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* Second pass: reset the counts and fill in the ranks per node */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; } /* NOTE(review): assumes n_neigh > 0 whenever n > 0 */
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* a rank may be recorded more than once for a node; sort and compress */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1658 
1659 /*@C
1660     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1661 
1662     Collective on ISLocalToGlobalMapping
1663 
1664     Input Parameter:
1665 .   mapping - the mapping from local to global indexing
1666 
1667     Output Parameters:
1668 +   nnodes - number of local nodes
1669 .   count - number of neighboring processors per node
1670 -   indices - indices of processes sharing the node (sorted)
1671 
1672     Level: advanced
1673 
1674 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1675           ISLocalToGlobalMappingGetInfo()
1676 @*/
1677 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1678 {
1679   PetscFunctionBegin;
1680   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1681   if (nnodes)  *nnodes  = 0;
1682   if (count)   *count   = NULL;
1683   if (indices) *indices = NULL;
1684   PetscFunctionReturn(0);
1685 }
1686 
1687 /*@C
1688    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1689 
1690    Not Collective
1691 
1692    Input Parameter:
1693 . ltog - local to global mapping
1694 
1695    Output Parameter:
1696 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1697 
1698    Level: advanced
1699 
1700    Notes:
1701     ISLocalToGlobalMappingGetSize() returns the length the this array
1702 
1703 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1704 @*/
1705 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1706 {
1707   PetscFunctionBegin;
1708   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1709   PetscValidPointer(array,2);
1710   if (ltog->bs == 1) {
1711     *array = ltog->indices;
1712   } else {
1713     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1714     const PetscInt *ii;
1715     PetscErrorCode ierr;
1716 
1717     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1718     *array = jj;
1719     k    = 0;
1720     ii   = ltog->indices;
1721     for (i=0; i<n; i++)
1722       for (j=0; j<bs; j++)
1723         jj[k++] = bs*ii[i] + j;
1724   }
1725   PetscFunctionReturn(0);
1726 }
1727 
1728 /*@C
1729    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1730 
1731    Not Collective
1732 
1733    Input Parameters:
1734 + ltog - local to global mapping
1735 - array - array of indices
1736 
1737    Level: advanced
1738 
1739 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1740 @*/
1741 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1742 {
1743   PetscFunctionBegin;
1744   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1745   PetscValidPointer(array,2);
1746   PetscCheckFalse(ltog->bs == 1 && *array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1747 
1748   if (ltog->bs > 1) {
1749     PetscErrorCode ierr;
1750     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1751   }
1752   PetscFunctionReturn(0);
1753 }
1754 
1755 /*@C
1756    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1757 
1758    Not Collective
1759 
1760    Input Parameter:
1761 . ltog - local to global mapping
1762 
1763    Output Parameter:
1764 . array - array of indices
1765 
1766    Level: advanced
1767 
1768 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1769 @*/
1770 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1771 {
1772   PetscFunctionBegin;
1773   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1774   PetscValidPointer(array,2);
1775   *array = ltog->indices;
1776   PetscFunctionReturn(0);
1777 }
1778 
1779 /*@C
1780    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1781 
1782    Not Collective
1783 
1784    Input Parameters:
1785 + ltog - local to global mapping
1786 - array - array of indices
1787 
1788    Level: advanced
1789 
1790 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1791 @*/
1792 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1793 {
1794   PetscFunctionBegin;
1795   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1796   PetscValidPointer(array,2);
1797   PetscCheckFalse(*array != ltog->indices,PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1798   *array = NULL;
1799   PetscFunctionReturn(0);
1800 }
1801 
1802 /*@C
1803    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1804 
1805    Not Collective
1806 
1807    Input Parameters:
1808 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1809 . n - number of mappings to concatenate
1810 - ltogs - local to global mappings
1811 
1812    Output Parameter:
1813 . ltogcat - new mapping
1814 
1815    Note: this currently always returns a mapping with block size of 1
1816 
1817    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1818 
1819    Level: advanced
1820 
1821 .seealso: ISLocalToGlobalMappingCreate()
1822 @*/
1823 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1824 {
1825   PetscInt       i,cnt,m,*idx;
1826   PetscErrorCode ierr;
1827 
1828   PetscFunctionBegin;
1829   PetscCheckFalse(n < 0,comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %" PetscInt_FMT,n);
1830   if (n > 0) PetscValidPointer(ltogs,3);
1831   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1832   PetscValidPointer(ltogcat,4);
1833   for (cnt=0,i=0; i<n; i++) {
1834     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1835     cnt += m;
1836   }
1837   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1838   for (cnt=0,i=0; i<n; i++) {
1839     const PetscInt *subidx;
1840     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1841     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1842     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1843     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1844     cnt += m;
1845   }
1846   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1847   PetscFunctionReturn(0);
1848 }
1849 
1850 /*MC
1851       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1852                                     used this is good for only small and moderate size problems.
1853 
1854    Options Database Keys:
1855 .   -islocaltoglobalmapping_type basic - select this method
1856 
1857    Level: beginner
1858 
1859 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1860 M*/
1861 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1862 {
1863   PetscFunctionBegin;
1864   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1865   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1866   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1867   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1868   PetscFunctionReturn(0);
1869 }
1870 
1871 /*MC
1872       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1873                                     used this is good for large memory problems.
1874 
1875    Options Database Keys:
1876 .   -islocaltoglobalmapping_type hash - select this method
1877 
1878    Notes:
1879     This is selected automatically for large problems if the user does not set the type.
1880 
1881    Level: beginner
1882 
1883 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1884 M*/
1885 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1886 {
1887   PetscFunctionBegin;
1888   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1889   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1890   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1891   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1892   PetscFunctionReturn(0);
1893 }
1894 
1895 /*@C
1896     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1897 
1898    Not Collective
1899 
1900    Input Parameters:
1901 +  sname - name of a new method
1902 -  routine_create - routine to create method context
1903 
1904    Notes:
1905    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1906 
1907    Sample usage:
1908 .vb
1909    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1910 .ve
1911 
1912    Then, your mapping can be chosen with the procedural interface via
1913 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1914    or at runtime via the option
1915 $     -islocaltoglobalmapping_type my_mapper
1916 
1917    Level: advanced
1918 
1919 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1920 
1921 @*/
1922 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1923 {
1924   PetscErrorCode ierr;
1925 
1926   PetscFunctionBegin;
1927   ierr = ISInitializePackage();CHKERRQ(ierr);
1928   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1929   PetscFunctionReturn(0);
1930 }
1931 
1932 /*@C
1933    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1934 
1935    Logically Collective on ISLocalToGlobalMapping
1936 
1937    Input Parameters:
1938 +  ltog - the ISLocalToGlobalMapping object
1939 -  type - a known method
1940 
1941    Options Database Key:
1942 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1943     of available methods (for instance, basic or hash)
1944 
1945    Notes:
1946    See "petsc/include/petscis.h" for available methods
1947 
1948   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1949   then set the ISLocalToGlobalMapping type from the options database rather than by using
1950   this routine.
1951 
1952   Level: intermediate
1953 
1954   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1955   are accessed by ISLocalToGlobalMappingSetType().
1956 
1957 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingGetType()
1958 @*/
1959 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1960 {
1961   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping) = NULL;
1962   PetscBool      match;
1963 
1964   PetscFunctionBegin;
1965   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1966   if (type) PetscValidCharPointer(type,2);
1967 
1968   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1969   if (match) PetscFunctionReturn(0);
1970 
1971   /* L2G maps defer type setup at globaltolocal calls, allow passing NULL here */
1972   if (type) {
1973     ierr = PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1974     PetscCheck(r,PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1975   }
1976   /* Destroy the previous private LTOG context */
1977   if (ltog->ops->destroy) {
1978     ierr = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1979     ltog->ops->destroy = NULL;
1980   }
1981   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1982   if (r) { ierr = (*r)(ltog);CHKERRQ(ierr); }
1983   PetscFunctionReturn(0);
1984 }
1985 
1986 /*@C
1987    ISLocalToGlobalMappingGetType - Get the type of the l2g map
1988 
1989    Not Collective
1990 
1991    Input Parameter:
1992 .  ltog - the ISLocalToGlobalMapping object
1993 
1994    Output Parameter:
1995 .  type - the type
1996 
1997 .seealso: ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType()
1998 @*/
1999 PetscErrorCode  ISLocalToGlobalMappingGetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType *type)
2000 {
2001   PetscFunctionBegin;
2002   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
2003   PetscValidPointer(type,2);
2004   *type = ((PetscObject)ltog)->type_name;
2005   PetscFunctionReturn(0);
2006 }
2007 
2008 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
2009 
2010 /*@C
2011   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
2012 
2013   Not Collective
2014 
2015   Level: advanced
2016 
2017 .seealso:  ISRegister(),  ISLocalToGlobalRegister()
2018 @*/
2019 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
2020 {
2021   PetscErrorCode ierr;
2022 
2023   PetscFunctionBegin;
2024   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
2025   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
2026   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
2027   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
2028   PetscFunctionReturn(0);
2029 }
2030