xref: /petsc/src/vec/is/utils/isltog.c (revision 285fb4e2b69b3de46a0633bd0adc6a7f684caa1e)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 
19 /* -----------------------------------------------------------------------------------------*/
20 
21 /*
22     Creates the global mapping information in the ISLocalToGlobalMapping structure
23 
24     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
25 */
26 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
27 {
28   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   if (mapping->data) PetscFunctionReturn(0);
33   end   = 0;
34   start = PETSC_MAX_INT;
35 
36   for (i=0; i<n; i++) {
37     if (idx[i] < 0) continue;
38     if (idx[i] < start) start = idx[i];
39     if (idx[i] > end)   end   = idx[i];
40   }
41   if (start > end) {start = 0; end = -1;}
42   mapping->globalstart = start;
43   mapping->globalend   = end;
44   if (!((PetscObject)mapping)->type_name) {
45     if ((end - start) > PetscMax(4*n,1000000)) {
46       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
47     } else {
48       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
49     }
50   }
51   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
52   PetscFunctionReturn(0);
53 }
54 
55 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
56 {
57   PetscErrorCode              ierr;
58   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
59   ISLocalToGlobalMapping_Basic *map;
60 
61   PetscFunctionBegin;
62   start            = mapping->globalstart;
63   end              = mapping->globalend;
64   ierr             = PetscNew(&map);CHKERRQ(ierr);
65   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
66   map->globals     = globals;
67   for (i=0; i<end-start+1; i++) globals[i] = -1;
68   for (i=0; i<n; i++) {
69     if (idx[i] < 0) continue;
70     globals[idx[i] - start] = i;
71   }
72   mapping->data = (void*)map;
73   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
74   PetscFunctionReturn(0);
75 }
76 
77 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
78 {
79   PetscErrorCode              ierr;
80   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
81   ISLocalToGlobalMapping_Hash *map;
82 
83   PetscFunctionBegin;
84   ierr = PetscNew(&map);CHKERRQ(ierr);
85   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
86   for (i=0; i<n; i++ ) {
87     if (idx[i] < 0) continue;
88     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
89   }
90   mapping->data = (void*)map;
91   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
92   PetscFunctionReturn(0);
93 }
94 
95 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
96 {
97   PetscErrorCode              ierr;
98   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
99 
100   PetscFunctionBegin;
101   if (!map) PetscFunctionReturn(0);
102   ierr = PetscFree(map->globals);CHKERRQ(ierr);
103   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
104   PetscFunctionReturn(0);
105 }
106 
107 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
108 {
109   PetscErrorCode              ierr;
110   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
111 
112   PetscFunctionBegin;
113   if (!map) PetscFunctionReturn(0);
114   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
115   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
116   PetscFunctionReturn(0);
117 }
118 
119 #define GTOLTYPE _Basic
120 #define GTOLNAME _Basic
121 #define GTOLBS mapping->bs
122 #define GTOL(g, local) do {                  \
123     local = map->globals[g/bs - start];      \
124     local = bs*local + (g % bs);             \
125   } while (0)
126 
127 #include <../src/vec/is/utils/isltog.h>
128 
129 #define GTOLTYPE _Basic
130 #define GTOLNAME Block_Basic
131 #define GTOLBS 1
132 #define GTOL(g, local) do {                  \
133     local = map->globals[g - start];         \
134   } while (0)
135 #include <../src/vec/is/utils/isltog.h>
136 
137 #define GTOLTYPE _Hash
138 #define GTOLNAME _Hash
139 #define GTOLBS mapping->bs
140 #define GTOL(g, local) do {                         \
141     (void)PetscHMapIGet(map->globalht,g/bs,&local); \
142     local = bs*local + (g % bs);                    \
143    } while (0)
144 #include <../src/vec/is/utils/isltog.h>
145 
146 #define GTOLTYPE _Hash
147 #define GTOLNAME Block_Hash
148 #define GTOLBS 1
149 #define GTOL(g, local) do {                         \
150     (void)PetscHMapIGet(map->globalht,g,&local);    \
151   } while (0)
152 #include <../src/vec/is/utils/isltog.h>
153 
154 /*@
155     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
156 
157     Not Collective
158 
159     Input Parameter:
160 .   ltog - local to global mapping
161 
162     Output Parameter:
163 .   nltog - the duplicated local to global mapping
164 
165     Level: advanced
166 
167     Concepts: mapping^local to global
168 
169 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
170 @*/
171 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
172 {
173   PetscErrorCode ierr;
174 
175   PetscFunctionBegin;
176   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
177   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
178   PetscFunctionReturn(0);
179 }
180 
181 /*@
182     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
183 
184     Not Collective
185 
186     Input Parameter:
187 .   ltog - local to global mapping
188 
189     Output Parameter:
190 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
191 
192     Level: advanced
193 
194     Concepts: mapping^local to global
195 
196 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
197 @*/
198 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
199 {
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
202   PetscValidIntPointer(n,2);
203   *n = mapping->bs*mapping->n;
204   PetscFunctionReturn(0);
205 }
206 
207 /*@C
208     ISLocalToGlobalMappingView - View a local to global mapping
209 
210     Not Collective
211 
212     Input Parameters:
213 +   ltog - local to global mapping
214 -   viewer - viewer
215 
216     Level: advanced
217 
218     Concepts: mapping^local to global
219 
220 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
221 @*/
222 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
223 {
224   PetscInt       i;
225   PetscMPIInt    rank;
226   PetscBool      iascii;
227   PetscErrorCode ierr;
228 
229   PetscFunctionBegin;
230   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
231   if (!viewer) {
232     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
233   }
234   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
235 
236   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
237   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
238   if (iascii) {
239     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
240     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
241     for (i=0; i<mapping->n; i++) {
242       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
243     }
244     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
245     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
246   }
247   PetscFunctionReturn(0);
248 }
249 
250 /*@
251     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
252     ordering and a global parallel ordering.
253 
254     Not collective
255 
256     Input Parameter:
257 .   is - index set containing the global numbers for each local number
258 
259     Output Parameter:
260 .   mapping - new mapping data structure
261 
262     Notes:
263     the block size of the IS determines the block size of the mapping
264     Level: advanced
265 
266     Concepts: mapping^local to global
267 
268 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
269 @*/
270 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
271 {
272   PetscErrorCode ierr;
273   PetscInt       n,bs;
274   const PetscInt *indices;
275   MPI_Comm       comm;
276   PetscBool      isblock;
277 
278   PetscFunctionBegin;
279   PetscValidHeaderSpecific(is,IS_CLASSID,1);
280   PetscValidPointer(mapping,2);
281 
282   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
283   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
284   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
285   if (!isblock) {
286     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
287     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
288     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
289   } else {
290     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
291     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
292     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
293     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
294   }
295   PetscFunctionReturn(0);
296 }
297 
298 /*@C
299     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
300     ordering and a global parallel ordering.
301 
302     Collective
303 
304     Input Parameter:
305 +   sf - star forest mapping contiguous local indices to (rank, offset)
306 -   start - first global index on this process
307 
308     Output Parameter:
309 .   mapping - new mapping data structure
310 
311     Level: advanced
312 
313     Concepts: mapping^local to global
314 
315 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
316 @*/
317 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
318 {
319   PetscErrorCode ierr;
320   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
321   const PetscInt *ilocal;
322   MPI_Comm       comm;
323 
324   PetscFunctionBegin;
325   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
326   PetscValidPointer(mapping,3);
327 
328   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
329   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
330   if (ilocal) {
331     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
332   }
333   else maxlocal = nleaves;
334   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
335   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
336   for (i=0; i<nroots; i++) globals[i] = start + i;
337   for (i=0; i<maxlocal; i++) ltog[i] = -1;
338   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
339   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
340   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
341   ierr = PetscFree(globals);CHKERRQ(ierr);
342   PetscFunctionReturn(0);
343 }
344 
345 /*@
346     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
347 
348     Not collective
349 
350     Input Parameters:
351 .   mapping - mapping data structure
352 .   bs - the blocksize
353 
354     Level: advanced
355 
356     Concepts: mapping^local to global
357 
358 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
359 @*/
360 PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
361 {
362   PetscInt       *nid;
363   const PetscInt *oid;
364   PetscInt       i,cn,on,obs,nn;
365   PetscErrorCode ierr;
366 
367   PetscFunctionBegin;
368   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
369   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
370   if (bs == mapping->bs) PetscFunctionReturn(0);
371   on  = mapping->n;
372   obs = mapping->bs;
373   oid = mapping->indices;
374   nn  = (on*obs)/bs;
375   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
376 
377   ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
378   ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
379   for (i=0;i<nn;i++) {
380     PetscInt j;
381     for (j=0,cn=0;j<bs-1;j++) {
382       if (oid[i*bs+j] < 0) { cn++; continue; }
383       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
384     }
385     if (oid[i*bs+j] < 0) cn++;
386     if (cn) {
387       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
388       nid[i] = -1;
389     } else {
390       nid[i] = oid[i*bs]/bs;
391     }
392   }
393   ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);
394 
395   mapping->n           = nn;
396   mapping->bs          = bs;
397   ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
398   mapping->indices     = nid;
399   mapping->globalstart = 0;
400   mapping->globalend   = 0;
401   if (mapping->ops->destroy) {
402     ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
403   }
404   PetscFunctionReturn(0);
405 }
406 
407 /*@
408     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
409     ordering and a global parallel ordering.
410 
411     Not Collective
412 
413     Input Parameters:
414 .   mapping - mapping data structure
415 
416     Output Parameter:
417 .   bs - the blocksize
418 
419     Level: advanced
420 
421     Concepts: mapping^local to global
422 
423 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
424 @*/
425 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
426 {
427   PetscFunctionBegin;
428   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
429   *bs = mapping->bs;
430   PetscFunctionReturn(0);
431 }
432 
433 /*@
434     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
435     ordering and a global parallel ordering.
436 
437     Not Collective, but communicator may have more than one process
438 
439     Input Parameters:
440 +   comm - MPI communicator
441 .   bs - the block size
442 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
443 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
444 -   mode - see PetscCopyMode
445 
446     Output Parameter:
447 .   mapping - new mapping data structure
448 
449     Notes:
450     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
451 
452     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
453     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
454     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
455 
456     Level: advanced
457 
458     Concepts: mapping^local to global
459 
460 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
461           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
462 @*/
463 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
464 {
465   PetscErrorCode ierr;
466   PetscInt       *in;
467 
468   PetscFunctionBegin;
469   if (n) PetscValidIntPointer(indices,3);
470   PetscValidPointer(mapping,4);
471 
472   *mapping = NULL;
473   ierr = ISInitializePackage();CHKERRQ(ierr);
474 
475   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
476                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
477   (*mapping)->n             = n;
478   (*mapping)->bs            = bs;
479   (*mapping)->info_cached   = PETSC_FALSE;
480   (*mapping)->info_free     = PETSC_FALSE;
481   (*mapping)->info_procs    = NULL;
482   (*mapping)->info_numprocs = NULL;
483   (*mapping)->info_indices  = NULL;
484 
485   (*mapping)->ops->globaltolocalmappingapply      = NULL;
486   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
487   (*mapping)->ops->destroy                        = NULL;
488   if (mode == PETSC_COPY_VALUES) {
489     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
490     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
491     (*mapping)->indices = in;
492     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
493   } else if (mode == PETSC_OWN_POINTER) {
494     (*mapping)->indices = (PetscInt*)indices;
495     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
496   }
497   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
498   PetscFunctionReturn(0);
499 }
500 
501 PetscFunctionList ISLocalToGlobalMappingList = NULL;
502 
503 
504 /*@
505    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
506 
507    Not collective
508 
509    Input Parameters:
510 .  mapping - mapping data structure
511 
512    Level: advanced
513 
514 @*/
515 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
516 {
517   PetscErrorCode             ierr;
518   char                       type[256];
519   ISLocalToGlobalMappingType defaulttype = "Not set";
520   PetscBool                  flg;
521 
522   PetscFunctionBegin;
523   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
524   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
525   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
526   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
527   if (flg) {
528     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
529   }
530   ierr = PetscOptionsEnd();CHKERRQ(ierr);
531   PetscFunctionReturn(0);
532 }
533 
534 /*@
535    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
536    ordering and a global parallel ordering.
537 
538    Note Collective
539 
540    Input Parameters:
541 .  mapping - mapping data structure
542 
543    Level: advanced
544 
545 .seealso: ISLocalToGlobalMappingCreate()
546 @*/
547 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
548 {
549   PetscErrorCode ierr;
550 
551   PetscFunctionBegin;
552   if (!*mapping) PetscFunctionReturn(0);
553   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
554   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
555   ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
556   ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
557   ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
558   if ((*mapping)->info_indices) {
559     PetscInt i;
560 
561     ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
562     for (i=1; i<(*mapping)->info_nproc; i++) {
563       ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
564     }
565     ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
566   }
567   if ((*mapping)->ops->destroy) {
568     ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
569   }
570   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
571   *mapping = 0;
572   PetscFunctionReturn(0);
573 }
574 
575 /*@
576     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
577     a new index set using the global numbering defined in an ISLocalToGlobalMapping
578     context.
579 
580     Not collective
581 
582     Input Parameters:
583 +   mapping - mapping between local and global numbering
584 -   is - index set in local numbering
585 
586     Output Parameters:
587 .   newis - index set in global numbering
588 
589     Level: advanced
590 
591     Concepts: mapping^local to global
592 
593 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
594           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
595 @*/
596 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
597 {
598   PetscErrorCode ierr;
599   PetscInt       n,*idxout;
600   const PetscInt *idxin;
601 
602   PetscFunctionBegin;
603   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
604   PetscValidHeaderSpecific(is,IS_CLASSID,2);
605   PetscValidPointer(newis,3);
606 
607   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
608   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
609   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
610   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
611   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
612   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
613   PetscFunctionReturn(0);
614 }
615 
616 /*@
617    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
618    and converts them to the global numbering.
619 
620    Not collective
621 
622    Input Parameters:
623 +  mapping - the local to global mapping context
624 .  N - number of integers
625 -  in - input indices in local numbering
626 
627    Output Parameter:
628 .  out - indices in global numbering
629 
630    Notes:
631    The in and out array parameters may be identical.
632 
633    Level: advanced
634 
635 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
636           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
637           AOPetscToApplication(), ISGlobalToLocalMappingApply()
638 
639     Concepts: mapping^local to global
640 @*/
641 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
642 {
643   PetscInt i,bs,Nmax;
644 
645   PetscFunctionBegin;
646   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
647   bs   = mapping->bs;
648   Nmax = bs*mapping->n;
649   if (bs == 1) {
650     const PetscInt *idx = mapping->indices;
651     for (i=0; i<N; i++) {
652       if (in[i] < 0) {
653         out[i] = in[i];
654         continue;
655       }
656       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
657       out[i] = idx[in[i]];
658     }
659   } else {
660     const PetscInt *idx = mapping->indices;
661     for (i=0; i<N; i++) {
662       if (in[i] < 0) {
663         out[i] = in[i];
664         continue;
665       }
666       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
667       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
668     }
669   }
670   PetscFunctionReturn(0);
671 }
672 
673 /*@
674    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
675 
676    Not collective
677 
678    Input Parameters:
679 +  mapping - the local to global mapping context
680 .  N - number of integers
681 -  in - input indices in local block numbering
682 
683    Output Parameter:
684 .  out - indices in global block numbering
685 
686    Notes:
687    The in and out array parameters may be identical.
688 
689    Example:
690      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
691      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
692 
693    Level: advanced
694 
695 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
696           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
697           AOPetscToApplication(), ISGlobalToLocalMappingApply()
698 
699     Concepts: mapping^local to global
700 @*/
701 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
702 {
703 
704   PetscFunctionBegin;
705   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
706   {
707     PetscInt i,Nmax = mapping->n;
708     const PetscInt *idx = mapping->indices;
709 
710     for (i=0; i<N; i++) {
711       if (in[i] < 0) {
712         out[i] = in[i];
713         continue;
714       }
715       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
716       out[i] = idx[in[i]];
717     }
718   }
719   PetscFunctionReturn(0);
720 }
721 
722 /*@
723     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
724     specified with a global numbering.
725 
726     Not collective
727 
728     Input Parameters:
729 +   mapping - mapping between local and global numbering
730 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
731            IS_GTOLM_DROP - drops the indices with no local value from the output list
732 .   n - number of global indices to map
733 -   idx - global indices to map
734 
735     Output Parameters:
736 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
737 -   idxout - local index of each global index, one must pass in an array long enough
738              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
739              idxout == NULL to determine the required length (returned in nout)
740              and then allocate the required space and call ISGlobalToLocalMappingApply()
741              a second time to set the values.
742 
743     Notes:
744     Either nout or idxout may be NULL. idx and idxout may be identical.
745 
746     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
747     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
748     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
749 
750     Level: advanced
751 
752     Developer Note: The manual page states that idx and idxout may be identical but the calling
753        sequence declares idx as const so it cannot be the same as idxout.
754 
755     Concepts: mapping^global to local
756 
757 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
758           ISLocalToGlobalMappingDestroy()
759 @*/
760 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
761 {
762   PetscErrorCode ierr;
763 
764   PetscFunctionBegin;
765   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
766   if (!mapping->data) {
767     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
768   }
769   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 /*@
774     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
775     a new index set using the local numbering defined in an ISLocalToGlobalMapping
776     context.
777 
778     Not collective
779 
780     Input Parameters:
781 +   mapping - mapping between local and global numbering
782 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
783            IS_GTOLM_DROP - drops the indices with no local value from the output list
784 -   is - index set in global numbering
785 
786     Output Parameters:
787 .   newis - index set in local numbering
788 
789     Level: advanced
790 
791     Concepts: mapping^local to global
792 
793 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
794           ISLocalToGlobalMappingDestroy()
795 @*/
796 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
797 {
798   PetscErrorCode ierr;
799   PetscInt       n,nout,*idxout;
800   const PetscInt *idxin;
801 
802   PetscFunctionBegin;
803   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
804   PetscValidHeaderSpecific(is,IS_CLASSID,3);
805   PetscValidPointer(newis,4);
806 
807   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
808   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
809   if (type == IS_GTOLM_MASK) {
810     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
811   } else {
812     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
813     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
814   }
815   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
816   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
817   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
818   PetscFunctionReturn(0);
819 }
820 
821 /*@
822     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
823     specified with a block global numbering.
824 
825     Not collective
826 
827     Input Parameters:
828 +   mapping - mapping between local and global numbering
829 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
830            IS_GTOLM_DROP - drops the indices with no local value from the output list
831 .   n - number of global indices to map
832 -   idx - global indices to map
833 
834     Output Parameters:
835 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
836 -   idxout - local index of each global index, one must pass in an array long enough
837              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
838              idxout == NULL to determine the required length (returned in nout)
839              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
840              a second time to set the values.
841 
842     Notes:
843     Either nout or idxout may be NULL. idx and idxout may be identical.
844 
845     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
846     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
847     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
848 
849     Level: advanced
850 
851     Developer Note: The manual page states that idx and idxout may be identical but the calling
852        sequence declares idx as const so it cannot be the same as idxout.
853 
854     Concepts: mapping^global to local
855 
856 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
857           ISLocalToGlobalMappingDestroy()
858 @*/
859 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
860                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
861 {
862   PetscErrorCode ierr;
863 
864   PetscFunctionBegin;
865   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
866   if (!mapping->data) {
867     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
868   }
869   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
870   PetscFunctionReturn(0);
871 }
872 
873 
874 /*@C
875     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
876      each index shared by more than one processor
877 
878     Collective on ISLocalToGlobalMapping
879 
880     Input Parameters:
881 .   mapping - the mapping from local to global indexing
882 
    Output Parameters:
884 +   nproc - number of processors that are connected to this one
885 .   proc - neighboring processors
886 .   numproc - number of indices for each subdomain (processor)
887 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
888 
889     Level: advanced
890 
891     Concepts: mapping^local to global
892 
893     Fortran Usage:
894 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
895 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
896           PetscInt indices[nproc][numprocmax],ierr)
897         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
898         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
899 
900 
901 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
902           ISLocalToGlobalMappingRestoreInfo()
903 @*/
904 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
905 {
906   PetscErrorCode ierr;
907 
908   PetscFunctionBegin;
909   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
910   if (mapping->info_cached) {
911     *nproc    = mapping->info_nproc;
912     *procs    = mapping->info_procs;
913     *numprocs = mapping->info_numprocs;
914     *indices  = mapping->info_indices;
915   } else {
916     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
917   }
918   PetscFunctionReturn(0);
919 }
920 
/*
   Builds, by parallel rendezvous, the list of neighboring ranks and, for each
   neighbor, the indices (in local numbering) of the nodes shared with it.

   Outline of the communication (three message rounds, tags tag1..tag3):
     1. The global index range [0,Ng) is partitioned evenly across ranks
        ("global owners", ng indices starting at rstart per rank).
     2. Each rank sends its (global index, local position) pairs to the
        global owners of those indices (tag1).
     3. Each owner counts how many ranks contributed each of its indices; an
        index with more than one contributor lies on a subdomain boundary.
     4. The owners send back to every contributor, per contributed node, the
        list of ranks sharing it (lengths on tag2, payload on tag3).
     5. Each rank regroups the received data by neighboring rank and sorts
        each neighbor's shared indices by global number.

   The result is cached in mapping->info_* so that repeated calls are cheap.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* sequential: no neighbors; still allocate the (length-one) arrays so the
       restore/destroy code can free them uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = one more than the largest global index used on any process */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank globally owns the scale consecutive indices starting at rstart
     (the last rank owns whatever remains) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners; each entry is a
     (global index, local position) pair, hence the factor of 2 throughout */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop advanced starts[]; rebuild the message offsets */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is nprocs[i] long and consists of
       (0) the number of nodes being sent back
     followed, for each nontrivial node, by
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  /* nt = number of distinct neighboring subdomains */
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  /* bprocs maps a rank to its slot in the compressed neighbor arrays */
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1330 
1331 /*@C
1332     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1333 
1334     Collective on ISLocalToGlobalMapping
1335 
1336     Input Parameters:
1337 .   mapping - the mapping from local to global indexing
1338 
    Output Parameters:
1340 +   nproc - number of processors that are connected to this one
1341 .   proc - neighboring processors
1342 .   numproc - number of indices for each processor
1343 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1344 
1345     Level: advanced
1346 
1347 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetBlockInfo()
1349 @*/
1350 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1351 {
1352   PetscErrorCode ierr;
1353 
1354   PetscFunctionBegin;
1355   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1356   if (mapping->info_free) {
1357     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1358     if (*indices) {
1359       PetscInt i;
1360 
1361       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1362       for (i=1; i<*nproc; i++) {
1363         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1364       }
1365       ierr = PetscFree(*indices);CHKERRQ(ierr);
1366     }
1367   }
1368   *nproc    = 0;
1369   *procs    = NULL;
1370   *numprocs = NULL;
1371   *indices  = NULL;
1372   PetscFunctionReturn(0);
1373 }
1374 
1375 /*@C
1376     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1377      each index shared by more than one processor
1378 
1379     Collective on ISLocalToGlobalMapping
1380 
1381     Input Parameters:
1382 .   mapping - the mapping from local to global indexing
1383 
    Output Parameters:
1385 +   nproc - number of processors that are connected to this one
1386 .   proc - neighboring processors
1387 .   numproc - number of indices for each subdomain (processor)
1388 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1389 
1390     Level: advanced
1391 
1392     Concepts: mapping^local to global
1393 
1394     Fortran Usage:
1395 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1396 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1397           PetscInt indices[nproc][numprocmax],ierr)
1398         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1399         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1400 
1401 
1402 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1403           ISLocalToGlobalMappingRestoreInfo()
1404 @*/
1405 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1406 {
1407   PetscErrorCode ierr;
1408   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1409 
1410   PetscFunctionBegin;
1411   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1412   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1413   if (bs > 1) { /* we need to expand the cached info */
1414     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1415     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1416     for (i=0; i<*nproc; i++) {
1417       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1418       for (j=0; j<bnumprocs[i]; j++) {
1419         for (k=0; k<bs; k++) {
1420           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1421         }
1422       }
1423       (*numprocs)[i] = bnumprocs[i]*bs;
1424     }
1425     mapping->info_free = PETSC_TRUE;
1426   } else {
1427     *numprocs = bnumprocs;
1428     *indices  = bindices;
1429   }
1430   PetscFunctionReturn(0);
1431 }
1432 
1433 /*@C
1434     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1435 
1436     Collective on ISLocalToGlobalMapping
1437 
1438     Input Parameters:
1439 .   mapping - the mapping from local to global indexing
1440 
    Output Parameters:
1442 +   nproc - number of processors that are connected to this one
1443 .   proc - neighboring processors
1444 .   numproc - number of indices for each processor
1445 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1446 
1447     Level: advanced
1448 
1449 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1450           ISLocalToGlobalMappingGetInfo()
1451 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Point info and block info follow the same ownership rules (cached arrays
     versus per-call expansions flagged by mapping->info_free), so the block
     restore routine does all of the work */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1460 
1461 /*@C
1462    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1463 
1464    Not Collective
1465 
1466    Input Arguments:
1467 . ltog - local to global mapping
1468 
1469    Output Arguments:
1470 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1471 
1472    Level: advanced
1473 
1474    Notes:
    ISLocalToGlobalMappingGetSize() returns the length of this array
1476 
1477 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1478 @*/
1479 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1480 {
1481   PetscFunctionBegin;
1482   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1483   PetscValidPointer(array,2);
1484   if (ltog->bs == 1) {
1485     *array = ltog->indices;
1486   } else {
1487     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1488     const PetscInt *ii;
1489     PetscErrorCode ierr;
1490 
1491     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1492     *array = jj;
1493     k    = 0;
1494     ii   = ltog->indices;
1495     for (i=0; i<n; i++)
1496       for (j=0; j<bs; j++)
1497         jj[k++] = bs*ii[i] + j;
1498   }
1499   PetscFunctionReturn(0);
1500 }
1501 
1502 /*@C
1503    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1504 
1505    Not Collective
1506 
1507    Input Arguments:
1508 + ltog - local to global mapping
1509 - array - array of indices
1510 
1511    Level: advanced
1512 
1513 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1514 @*/
1515 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1516 {
1517   PetscFunctionBegin;
1518   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1519   PetscValidPointer(array,2);
1520   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1521 
1522   if (ltog->bs > 1) {
1523     PetscErrorCode ierr;
1524     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1525   }
1526   PetscFunctionReturn(0);
1527 }
1528 
1529 /*@C
1530    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1531 
1532    Not Collective
1533 
1534    Input Arguments:
1535 . ltog - local to global mapping
1536 
1537    Output Arguments:
1538 . array - array of indices
1539 
1540    Level: advanced
1541 
1542 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1543 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the block indices are what is stored internally, so no copy is made; the
     caller must return this same pointer via
     ISLocalToGlobalMappingRestoreBlockIndices() and must not free it */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1552 
1553 /*@C
1554    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1555 
1556    Not Collective
1557 
1558    Input Arguments:
1559 + ltog - local to global mapping
1560 - array - array of indices
1561 
1562    Level: advanced
1563 
1564 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1565 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the get routine handed out the internal array itself; returning any other
     pointer is a caller error */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}
1575 
1576 /*@C
1577    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1578 
1579    Not Collective
1580 
1581    Input Arguments:
1582 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1583 . n - number of mappings to concatenate
1584 - ltogs - local to global mappings
1585 
1586    Output Arguments:
1587 . ltogcat - new mapping
1588 
1589    Note: this currently always returns a mapping with block size of 1
1590 
1591    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1592 
1593    Level: advanced
1594 
1595 .seealso: ISLocalToGlobalMappingCreate()
1596 @*/
1597 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1598 {
1599   PetscInt       i,cnt,m,*idx;
1600   PetscErrorCode ierr;
1601 
1602   PetscFunctionBegin;
1603   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1604   if (n > 0) PetscValidPointer(ltogs,3);
1605   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1606   PetscValidPointer(ltogcat,4);
1607   for (cnt=0,i=0; i<n; i++) {
1608     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1609     cnt += m;
1610   }
1611   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1612   for (cnt=0,i=0; i<n; i++) {
1613     const PetscInt *subidx;
1614     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1615     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1616     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1617     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1618     cnt += m;
1619   }
1620   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1621   PetscFunctionReturn(0);
1622 }
1623 
1624 /*MC
1625       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1626                                     used this is good for only small and moderate size problems.
1627 
   Options Database Key:
.   -islocaltoglobalmapping_type basic - select this method
1630 
1631    Level: beginner
1632 
1633 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1634 M*/
1635 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1636 {
1637   PetscFunctionBegin;
1638   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1639   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1640   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1641   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1642   PetscFunctionReturn(0);
1643 }
1644 
1645 /*MC
1646       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1647                                     used this is good for large memory problems.
1648 
   Options Database Key:
.   -islocaltoglobalmapping_type hash - select this method
1651 
1652 
1653    Notes:
1654     This is selected automatically for large problems if the user does not set the type.
1655 
1656    Level: beginner
1657 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1659 M*/
1660 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1661 {
1662   PetscFunctionBegin;
1663   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1664   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1665   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1666   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1667   PetscFunctionReturn(0);
1668 }
1669 
1670 
1671 /*@C
1672     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1673 
1674    Not Collective
1675 
1676    Input Parameters:
1677 +  sname - name of a new method
1678 -  routine_create - routine to create method context
1679 
1680    Notes:
1681    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1682 
1683    Sample usage:
1684 .vb
1685    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1686 .ve
1687 
1688    Then, your mapping can be chosen with the procedural interface via
1689 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1690    or at runtime via the option
1691 $     -islocaltoglobalmapping_type my_mapper
1692 
1693    Level: advanced
1694 
1695 .keywords: ISLocalToGlobalMappingType, register
1696 
1697 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1698 
1699 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* add (or replace) the creation routine under the given name in the global type list */
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1708 
1709 /*@C
1710    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1711 
1712    Logically Collective on ISLocalToGlobalMapping
1713 
1714    Input Parameters:
1715 +  ltog - the ISLocalToGlobalMapping object
1716 -  type - a known method
1717 
1718    Options Database Key:
1719 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1720     of available methods (for instance, basic or hash)
1721 
1722    Notes:
1723    See "petsc/include/petscis.h" for available methods
1724 
1725   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1726   then set the ISLocalToGlobalMapping type from the options database rather than by using
1727   this routine.
1728 
1729   Level: intermediate
1730 
1731   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1732   are accessed by ISLocalToGlobalMappingSetType().
1733 
1734 .keywords: ISLocalToGlobalMapping, set, method
1735 
1736 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1737 
1738 @*/
1739 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1740 {
1741   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1742   PetscBool      match;
1743 
1744   PetscFunctionBegin;
1745   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1746   PetscValidCharPointer(type,2);
1747 
1748   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1749   if (match) PetscFunctionReturn(0);
1750 
1751   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1752   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1753   /* Destroy the previous private LTOG context */
1754   if (ltog->ops->destroy) {
1755     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1756     ltog->ops->destroy = NULL;
1757   }
1758   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1759   ierr = (*r)(ltog);CHKERRQ(ierr);
1760   PetscFunctionReturn(0);
1761 }
1762 
1763 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1764 
1765 /*@C
1766   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1767 
1768   Not Collective
1769 
1770   Level: advanced
1771 
1772 .keywords: IS, register, all
1773 .seealso:  ISRegister(),  ISLocalToGlobalRegister()
1774 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* idempotent: registration is performed at most once per process */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  /* register the two built-in implementations */
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1787 
1788