xref: /petsc/src/vec/is/utils/isltog.c (revision 1ea09cb22e07962eaf12266eaa13077094975bb1)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hash.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt    *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHashI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 
19 /* -----------------------------------------------------------------------------------------*/
20 
21 /*
22     Creates the global mapping information in the ISLocalToGlobalMapping structure
23 
24     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
25 */
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* already set up: the type-specific data structure exists */
  if (mapping->data) PetscFunctionReturn(0);
  end   = 0;
  start = PETSC_MAX_INT;

  /* scan for the min/max of the nonnegative global (block) indices;
     negative entries mean "no global value" and are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: record an empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;
  /* if the user did not pick a type, choose HASH for sparse/large ranges
     (scalable memory use) and BASIC (dense table, faster) otherwise */
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4*n,1000000)) {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
    } else {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
    }
  }
  /* build the type-specific (basic or hash) global-to-local data */
  ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
54 
55 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
56 {
57   PetscErrorCode              ierr;
58   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
59   ISLocalToGlobalMapping_Basic *map;
60 
61   PetscFunctionBegin;
62   start            = mapping->globalstart;
63   end              = mapping->globalend;
64   ierr             = PetscNew(&map);CHKERRQ(ierr);
65   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
66   map->globals     = globals;
67   for (i=0; i<end-start+1; i++) globals[i] = -1;
68   for (i=0; i<n; i++) {
69     if (idx[i] < 0) continue;
70     globals[idx[i] - start] = i;
71   }
72   mapping->data = (void*)map;
73   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
74   PetscFunctionReturn(0);
75 }
76 
77 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
78 {
79   PetscErrorCode              ierr;
80   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
81   ISLocalToGlobalMapping_Hash *map;
82 
83   PetscFunctionBegin;
84   ierr = PetscNew(&map);CHKERRQ(ierr);
85   PetscHashICreate(map->globalht);
86   for (i=0; i<n; i++ ) {
87     if (idx[i] < 0) continue;
88     PetscHashIAdd(map->globalht, idx[i], i);
89   }
90   mapping->data = (void*)map;
91   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
92   PetscFunctionReturn(0);
93 }
94 
95 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
96 {
97   PetscErrorCode              ierr;
98   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
99 
100   PetscFunctionBegin;
101   if (!map) PetscFunctionReturn(0);
102   ierr = PetscFree(map->globals);CHKERRQ(ierr);
103   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
104   PetscFunctionReturn(0);
105 }
106 
107 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
108 {
109   PetscErrorCode              ierr;
110   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
111 
112   PetscFunctionBegin;
113   if (!map) PetscFunctionReturn(0);
114   PetscHashIDestroy(map->globalht);
115   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
116   PetscFunctionReturn(0);
117 }
118 
/* Template instantiations of the global-to-local apply routines.  Each group
   defines the type suffix (GTOLTYPE), function-name suffix (GTOLNAME), the
   block size expression (GTOLBS) and the per-index lookup GTOL(g,local), then
   includes the template source isltog.h.  NOTE(review): the redefinitions below
   rely on the included template #undef-ing these macros after use -- confirm in
   isltog.h. */

/* BASIC (dense table), point indices: block lookup expanded to point indices */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* BASIC (dense table), block indices: direct table lookup */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* HASH, point indices: hash lookup on the block, expanded to point indices */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    PetscHashIMap(map->globalht,g/bs,local); \
    local = bs*local + (g % bs);             \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* HASH, block indices: direct hash lookup */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    PetscHashIMap(map->globalht,g,local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
153 
154 /*@
155     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
156 
157     Not Collective
158 
159     Input Parameter:
160 .   ltog - local to global mapping
161 
162     Output Parameter:
163 .   nltog - the duplicated local to global mapping
164 
165     Level: advanced
166 
167     Concepts: mapping^local to global
168 
169 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
170 @*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* deep-copies the block indices only; NOTE(review): the mapping type
     (basic/hash) and any cached global-to-local data are not duplicated --
     the copy rebuilds them lazily on first ISGlobalToLocalMappingApply() */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
180 
181 /*@
182     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
183 
184     Not Collective
185 
186     Input Parameter:
187 .   ltog - local to global mapping
188 
189     Output Parameter:
190 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
191 
192     Level: advanced
193 
194     Concepts: mapping^local to global
195 
196 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
197 @*/
198 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
199 {
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
202   PetscValidIntPointer(n,2);
203   *n = mapping->bs*mapping->n;
204   PetscFunctionReturn(0);
205 }
206 
207 /*@C
208     ISLocalToGlobalMappingView - View a local to global mapping
209 
210     Not Collective
211 
212     Input Parameters:
213 +   ltog - local to global mapping
214 -   viewer - viewer
215 
216     Level: advanced
217 
218     Concepts: mapping^local to global
219 
220 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
221 @*/
222 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
223 {
224   PetscInt       i;
225   PetscMPIInt    rank;
226   PetscBool      iascii;
227   PetscErrorCode ierr;
228 
229   PetscFunctionBegin;
230   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
231   if (!viewer) {
232     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
233   }
234   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
235 
236   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
237   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
238   if (iascii) {
239     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
240     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
241     for (i=0; i<mapping->n; i++) {
242       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
243     }
244     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
245     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
246   }
247   PetscFunctionReturn(0);
248 }
249 
250 /*@
251     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
252     ordering and a global parallel ordering.
253 
254     Not collective
255 
256     Input Parameter:
257 .   is - index set containing the global numbers for each local number
258 
259     Output Parameter:
260 .   mapping - new mapping data structure
261 
262     Notes: the block size of the IS determines the block size of the mapping
263     Level: advanced
264 
265     Concepts: mapping^local to global
266 
267 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
268 @*/
269 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
270 {
271   PetscErrorCode ierr;
272   PetscInt       n,bs;
273   const PetscInt *indices;
274   MPI_Comm       comm;
275   PetscBool      isblock;
276 
277   PetscFunctionBegin;
278   PetscValidHeaderSpecific(is,IS_CLASSID,1);
279   PetscValidPointer(mapping,2);
280 
281   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
282   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
283   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
284   if (!isblock) {
285     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
286     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
287     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
288   } else {
289     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
290     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
291     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
292     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
293   }
294   PetscFunctionReturn(0);
295 }
296 
297 /*@C
298     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
299     ordering and a global parallel ordering.
300 
301     Collective
302 
303     Input Parameter:
304 +   sf - star forest mapping contiguous local indices to (rank, offset)
305 -   start - first global index on this process
306 
307     Output Parameter:
308 .   mapping - new mapping data structure
309 
310     Level: advanced
311 
312     Concepts: mapping^local to global
313 
314 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
315 @*/
316 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
317 {
318   PetscErrorCode ierr;
319   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
320   const PetscInt *ilocal;
321   MPI_Comm       comm;
322 
323   PetscFunctionBegin;
324   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
325   PetscValidPointer(mapping,3);
326 
327   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
328   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
329   if (ilocal) {
330     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
331   }
332   else maxlocal = nleaves;
333   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
334   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
335   for (i=0; i<nroots; i++) globals[i] = start + i;
336   for (i=0; i<maxlocal; i++) ltog[i] = -1;
337   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
338   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
339   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
340   ierr = PetscFree(globals);CHKERRQ(ierr);
341   PetscFunctionReturn(0);
342 }
343 
344 /*@
345     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
346 
347     Not collective
348 
349     Input Parameters:
350 .   mapping - mapping data structure
351 .   bs - the blocksize
352 
353     Level: advanced
354 
355     Concepts: mapping^local to global
356 
357 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
358 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  /* no-op when the block size is unchanged */
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  /* NOTE(review): this assignment is overwritten by ISLocalToGlobalMappingGetIndices() below */
  oid = mapping->indices;
  /* new number of blocks; the total point count on*obs must divide evenly by bs */
  nn  = (on*obs)/bs;
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* oid now points at the expanded (point, not block) index array */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* within each new block of bs points, indices must be consecutive;
       cn counts the negative ("no global value") entries in the block */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* account for the last entry of the block (loop ran only to bs-2) */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* a block must be all-negative or all-nonnegative */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      /* new block index = first point index of the block divided by bs */
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  /* install the re-blocked index array and invalidate cached global range */
  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  /* drop any type-specific global-to-local data; it will be rebuilt on demand */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
405 
406 /*@
407     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
408     ordering and a global parallel ordering.
409 
410     Not Collective
411 
412     Input Parameters:
413 .   mapping - mapping data structure
414 
415     Output Parameter:
416 .   bs - the blocksize
417 
418     Level: advanced
419 
420     Concepts: mapping^local to global
421 
422 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
423 @*/
424 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
425 {
426   PetscFunctionBegin;
427   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
428   *bs = mapping->bs;
429   PetscFunctionReturn(0);
430 }
431 
432 /*@
433     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
434     ordering and a global parallel ordering.
435 
436     Not Collective, but communicator may have more than one process
437 
438     Input Parameters:
439 +   comm - MPI communicator
440 .   bs - the block size
441 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
442 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
443 -   mode - see PetscCopyMode
444 
445     Output Parameter:
446 .   mapping - new mapping data structure
447 
448     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
449 
450     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
451     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
452     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
453 
454     Level: advanced
455 
456     Concepts: mapping^local to global
457 
458 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
459           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
460 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no entries */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  /* neighbor-information cache starts empty; filled lazily */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;

  /* global-to-local ops are installed later by ISLocalToGlobalMappingSetType() */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    /* deep copy: the mapping owns a private copy of the indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array (freed in ISLocalToGlobalMappingDestroy()) */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
498 
499 PetscFunctionList ISLocalToGlobalMappingList = NULL;
500 
501 
502 /*@
503    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
504 
505    Not collective
506 
507    Input Parameters:
508 .  mapping - mapping data structure
509 
510    Level: advanced
511 
512 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* make sure the basic/hash implementations are registered before listing them */
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* current type (if any) is shown as the default in the option listing */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
531 
532 /*@
533    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
534    ordering and a global parallel ordering.
535 
536    Note Collective
537 
538    Input Parameters:
539 .  mapping - mapping data structure
540 
541    Level: advanced
542 
543 .seealso: ISLocalToGlobalMappingCreate()
544 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  /* each per-neighbor index array is freed individually, then the array of pointers */
  if ((*mapping)->info_indices) {
    PetscInt i;

    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* type-specific teardown (basic table or hash) before the header goes away */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
572 
573 /*@
574     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
575     a new index set using the global numbering defined in an ISLocalToGlobalMapping
576     context.
577 
578     Not collective
579 
580     Input Parameters:
581 +   mapping - mapping between local and global numbering
582 -   is - index set in local numbering
583 
584     Output Parameters:
585 .   newis - index set in global numbering
586 
587     Level: advanced
588 
589     Concepts: mapping^local to global
590 
591 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
592           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
593 @*/
594 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
595 {
596   PetscErrorCode ierr;
597   PetscInt       n,*idxout;
598   const PetscInt *idxin;
599 
600   PetscFunctionBegin;
601   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
602   PetscValidHeaderSpecific(is,IS_CLASSID,2);
603   PetscValidPointer(newis,3);
604 
605   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
606   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
607   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
608   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
609   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
610   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
611   PetscFunctionReturn(0);
612 }
613 
614 /*@
615    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
616    and converts them to the global numbering.
617 
618    Not collective
619 
620    Input Parameters:
621 +  mapping - the local to global mapping context
622 .  N - number of integers
623 -  in - input indices in local numbering
624 
625    Output Parameter:
626 .  out - indices in global numbering
627 
628    Notes:
629    The in and out array parameters may be identical.
630 
631    Level: advanced
632 
633 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
634           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
635           AOPetscToApplication(), ISGlobalToLocalMappingApply()
636 
637     Concepts: mapping^local to global
638 @*/
639 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
640 {
641   PetscInt i,bs,Nmax;
642 
643   PetscFunctionBegin;
644   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
645   bs   = mapping->bs;
646   Nmax = bs*mapping->n;
647   if (bs == 1) {
648     const PetscInt *idx = mapping->indices;
649     for (i=0; i<N; i++) {
650       if (in[i] < 0) {
651         out[i] = in[i];
652         continue;
653       }
654       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
655       out[i] = idx[in[i]];
656     }
657   } else {
658     const PetscInt *idx = mapping->indices;
659     for (i=0; i<N; i++) {
660       if (in[i] < 0) {
661         out[i] = in[i];
662         continue;
663       }
664       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
665       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
666     }
667   }
668   PetscFunctionReturn(0);
669 }
670 
671 /*@
672    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
673 
674    Not collective
675 
676    Input Parameters:
677 +  mapping - the local to global mapping context
678 .  N - number of integers
679 -  in - input indices in local block numbering
680 
681    Output Parameter:
682 .  out - indices in global block numbering
683 
684    Notes:
685    The in and out array parameters may be identical.
686 
687    Example:
688      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
689      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
690 
691    Level: advanced
692 
693 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
694           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
695           AOPetscToApplication(), ISGlobalToLocalMappingApply()
696 
697     Concepts: mapping^local to global
698 @*/
699 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
700 {
701 
702   PetscFunctionBegin;
703   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
704   {
705     PetscInt i,Nmax = mapping->n;
706     const PetscInt *idx = mapping->indices;
707 
708     for (i=0; i<N; i++) {
709       if (in[i] < 0) {
710         out[i] = in[i];
711         continue;
712       }
713       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
714       out[i] = idx[in[i]];
715     }
716   }
717   PetscFunctionReturn(0);
718 }
719 
720 /*@
721     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
722     specified with a global numbering.
723 
724     Not collective
725 
726     Input Parameters:
727 +   mapping - mapping between local and global numbering
728 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
729            IS_GTOLM_DROP - drops the indices with no local value from the output list
730 .   n - number of global indices to map
731 -   idx - global indices to map
732 
733     Output Parameters:
734 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
735 -   idxout - local index of each global index, one must pass in an array long enough
736              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
737              idxout == NULL to determine the required length (returned in nout)
738              and then allocate the required space and call ISGlobalToLocalMappingApply()
739              a second time to set the values.
740 
741     Notes:
742     Either nout or idxout may be NULL. idx and idxout may be identical.
743 
744     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
745     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
746     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
747 
748     Level: advanced
749 
750     Developer Note: The manual page states that idx and idxout may be identical but the calling
751        sequence declares idx as const so it cannot be the same as idxout.
752 
753     Concepts: mapping^global to local
754 
755 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
756           ISLocalToGlobalMappingDestroy()
757 @*/
758 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
759 {
760   PetscErrorCode ierr;
761 
762   PetscFunctionBegin;
763   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
764   if (!mapping->data) {
765     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
766   }
767   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
768   PetscFunctionReturn(0);
769 }
770 
771 /*@
772     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
773     a new index set using the local numbering defined in an ISLocalToGlobalMapping
774     context.
775 
776     Not collective
777 
778     Input Parameters:
779 +   mapping - mapping between local and global numbering
780 -   is - index set in global numbering
781 
782     Output Parameters:
783 .   newis - index set in local numbering
784 
785     Level: advanced
786 
787     Concepts: mapping^local to global
788 
789 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
790           ISLocalToGlobalMappingDestroy()
791 @*/
792 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, IS is,IS *newis)
793 {
794   PetscErrorCode ierr;
795   PetscInt       n,nout,*idxout;
796   const PetscInt *idxin;
797 
798   PetscFunctionBegin;
799   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
800   PetscValidHeaderSpecific(is,IS_CLASSID,3);
801   PetscValidPointer(newis,4);
802 
803   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
804   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
805   if (type == IS_GTOLM_MASK) {
806     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
807   } else {
808     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
809     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
810   }
811   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
812   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
813   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 /*@
818     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
819     specified with a block global numbering.
820 
821     Not collective
822 
823     Input Parameters:
824 +   mapping - mapping between local and global numbering
825 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
826            IS_GTOLM_DROP - drops the indices with no local value from the output list
827 .   n - number of global indices to map
828 -   idx - global indices to map
829 
830     Output Parameters:
831 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
832 -   idxout - local index of each global index, one must pass in an array long enough
833              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
834              idxout == NULL to determine the required length (returned in nout)
835              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
836              a second time to set the values.
837 
838     Notes:
839     Either nout or idxout may be NULL. idx and idxout may be identical.
840 
841     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
842     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
843     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
844 
845     Level: advanced
846 
847     Developer Note: The manual page states that idx and idxout may be identical but the calling
848        sequence declares idx as const so it cannot be the same as idxout.
849 
850     Concepts: mapping^global to local
851 
852 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
853           ISLocalToGlobalMappingDestroy()
854 @*/
855 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
856                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
857 {
858   PetscErrorCode ierr;
859 
860   PetscFunctionBegin;
861   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
862   if (!mapping->data) {
863     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
864   }
865   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
866   PetscFunctionReturn(0);
867 }
868 
869 
870 /*@C
871     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
872      each index shared by more than one processor
873 
874     Collective on ISLocalToGlobalMapping
875 
876     Input Parameters:
877 .   mapping - the mapping from local to global indexing
878 
879     Output Parameter:
880 +   nproc - number of processors that are connected to this one
881 .   proc - neighboring processors
882 .   numproc - number of indices for each subdomain (processor)
883 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
884 
885     Level: advanced
886 
887     Concepts: mapping^local to global
888 
889     Fortran Usage:
890 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
891 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
892           PetscInt indices[nproc][numprocmax],ierr)
893         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
894         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
895 
896 
897 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
898           ISLocalToGlobalMappingRestoreInfo()
899 @*/
900 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
901 {
902   PetscErrorCode ierr;
903 
904   PetscFunctionBegin;
905   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
906   if (mapping->info_cached) {
907     *nproc    = mapping->info_nproc;
908     *procs    = mapping->info_procs;
909     *numprocs = mapping->info_numprocs;
910     *indices  = mapping->info_indices;
911   } else {
912     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
913   }
914   PetscFunctionReturn(0);
915 }
916 
/*
   Computes, for each neighboring rank, which local (block) indices this rank shares
   with it.  Implemented as a three-phase MPI rendezvous: (1) every rank sends each of
   its local indices to a "global owner" rank chosen by a uniform partition of the
   global index range; (2) the owners count, for each globally owned index, how many
   ranks hold it and send back, to each contributor, the list of sharing ranks for the
   shared ("nontrivial") indices; (3) each rank tallies the replies into per-neighbor
   index lists.  Results are cached on the mapping (info_cached) so the communication
   happens at most once; output arrays are owned by the cache, not the caller.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors, but still hand back one-entry arrays so callers may
       safely read (*numprocs)[0] and (*indices)[0] */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* three distinct tags, one per communication phase, to keep the phases from mixing */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* global range is [0, Ng); negative entries (unmapped) are simply binned by division below */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* block-partition the global range: ranks 0..size-2 own `scale` indices, the last rank the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  /* nprocs[] is interleaved: nprocs[2*p] = #indices destined to rank p, nprocs[2*p+1] = 0/1 flag */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message is at most 2*nmax entries: (global index, local position) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild it for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill above advanced starts[]; rebuild before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* phase 2: replies retrace phase-1 routes, so nsends2 = nrecvs and nrecvs2 = nsends */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* phase 3: walk the replies; nprocs[p] is now reused as #shared indices with rank p */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a rank to its slot in the compressed output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  /* (callers rely on slot 0 describing this rank's own shared indices) */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1326 
1327 /*@C
1328     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1329 
1330     Collective on ISLocalToGlobalMapping
1331 
1332     Input Parameters:
1333 .   mapping - the mapping from local to global indexing
1334 
1335     Output Parameter:
1336 +   nproc - number of processors that are connected to this one
1337 .   proc - neighboring processors
1338 .   numproc - number of indices for each processor
1339 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1340 
1341     Level: advanced
1342 
1343 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1344           ISLocalToGlobalMappingGetInfo()
1345 @*/
1346 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1347 {
1348   PetscErrorCode ierr;
1349 
1350   PetscFunctionBegin;
1351   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1352   if (mapping->info_free) {
1353     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1354     if (*indices) {
1355       PetscInt i;
1356 
1357       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1358       for (i=1; i<*nproc; i++) {
1359         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1360       }
1361       ierr = PetscFree(*indices);CHKERRQ(ierr);
1362     }
1363   }
1364   *nproc    = 0;
1365   *procs    = NULL;
1366   *numprocs = NULL;
1367   *indices  = NULL;
1368   PetscFunctionReturn(0);
1369 }
1370 
1371 /*@C
1372     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1373      each index shared by more than one processor
1374 
1375     Collective on ISLocalToGlobalMapping
1376 
1377     Input Parameters:
1378 .   mapping - the mapping from local to global indexing
1379 
1380     Output Parameter:
1381 +   nproc - number of processors that are connected to this one
1382 .   proc - neighboring processors
1383 .   numproc - number of indices for each subdomain (processor)
1384 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1385 
1386     Level: advanced
1387 
1388     Concepts: mapping^local to global
1389 
1390     Fortran Usage:
1391 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1392 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1393           PetscInt indices[nproc][numprocmax],ierr)
1394         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1395         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1396 
1397 
1398 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1399           ISLocalToGlobalMappingRestoreInfo()
1400 @*/
1401 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1402 {
1403   PetscErrorCode ierr;
1404   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1405 
1406   PetscFunctionBegin;
1407   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1408   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1409   if (bs > 1) { /* we need to expand the cached info */
1410     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1411     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1412     for (i=0; i<*nproc; i++) {
1413       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1414       for (j=0; j<bnumprocs[i]; j++) {
1415         for (k=0; k<bs; k++) {
1416           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1417         }
1418       }
1419       (*numprocs)[i] = bnumprocs[i]*bs;
1420     }
1421     mapping->info_free = PETSC_TRUE;
1422   } else {
1423     *numprocs = bnumprocs;
1424     *indices  = bindices;
1425   }
1426   PetscFunctionReturn(0);
1427 }
1428 
1429 /*@C
1430     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1431 
1432     Collective on ISLocalToGlobalMapping
1433 
1434     Input Parameters:
1435 .   mapping - the mapping from local to global indexing
1436 
1437     Output Parameter:
1438 +   nproc - number of processors that are connected to this one
1439 .   proc - neighboring processors
1440 .   numproc - number of indices for each processor
1441 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1442 
1443     Level: advanced
1444 
1445 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1446           ISLocalToGlobalMappingGetInfo()
1447 @*/
1448 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1449 {
1450   PetscErrorCode ierr;
1451 
1452   PetscFunctionBegin;
1453   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1454   PetscFunctionReturn(0);
1455 }
1456 
1457 /*@C
1458    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1459 
1460    Not Collective
1461 
1462    Input Arguments:
1463 . ltog - local to global mapping
1464 
1465    Output Arguments:
1466 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1467 
1468    Level: advanced
1469 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1471 
1472 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1473 @*/
1474 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1475 {
1476   PetscFunctionBegin;
1477   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1478   PetscValidPointer(array,2);
1479   if (ltog->bs == 1) {
1480     *array = ltog->indices;
1481   } else {
1482     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1483     const PetscInt *ii;
1484     PetscErrorCode ierr;
1485 
1486     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1487     *array = jj;
1488     k    = 0;
1489     ii   = ltog->indices;
1490     for (i=0; i<n; i++)
1491       for (j=0; j<bs; j++)
1492         jj[k++] = bs*ii[i] + j;
1493   }
1494   PetscFunctionReturn(0);
1495 }
1496 
1497 /*@C
1498    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1499 
1500    Not Collective
1501 
1502    Input Arguments:
1503 + ltog - local to global mapping
1504 - array - array of indices
1505 
1506    Level: advanced
1507 
1508 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1509 @*/
1510 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1511 {
1512   PetscFunctionBegin;
1513   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1514   PetscValidPointer(array,2);
1515   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1516 
1517   if (ltog->bs > 1) {
1518     PetscErrorCode ierr;
1519     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1520   }
1521   PetscFunctionReturn(0);
1522 }
1523 
1524 /*@C
1525    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1526 
1527    Not Collective
1528 
1529    Input Arguments:
1530 . ltog - local to global mapping
1531 
1532    Output Arguments:
1533 . array - array of indices
1534 
1535    Level: advanced
1536 
1537 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1538 @*/
1539 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1540 {
1541   PetscFunctionBegin;
1542   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1543   PetscValidPointer(array,2);
1544   *array = ltog->indices;
1545   PetscFunctionReturn(0);
1546 }
1547 
1548 /*@C
1549    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1550 
1551    Not Collective
1552 
1553    Input Arguments:
1554 + ltog - local to global mapping
1555 - array - array of indices
1556 
1557    Level: advanced
1558 
1559 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1560 @*/
1561 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1562 {
1563   PetscFunctionBegin;
1564   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1565   PetscValidPointer(array,2);
1566   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1567   *array = NULL;
1568   PetscFunctionReturn(0);
1569 }
1570 
1571 /*@C
1572    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1573 
1574    Not Collective
1575 
1576    Input Arguments:
1577 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1578 . n - number of mappings to concatenate
1579 - ltogs - local to global mappings
1580 
1581    Output Arguments:
1582 . ltogcat - new mapping
1583 
1584    Note: this currently always returns a mapping with block size of 1
1585 
1586    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1587 
1588    Level: advanced
1589 
1590 .seealso: ISLocalToGlobalMappingCreate()
1591 @*/
1592 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1593 {
1594   PetscInt       i,cnt,m,*idx;
1595   PetscErrorCode ierr;
1596 
1597   PetscFunctionBegin;
1598   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1599   if (n > 0) PetscValidPointer(ltogs,3);
1600   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1601   PetscValidPointer(ltogcat,4);
1602   for (cnt=0,i=0; i<n; i++) {
1603     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1604     cnt += m;
1605   }
1606   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1607   for (cnt=0,i=0; i<n; i++) {
1608     const PetscInt *subidx;
1609     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1610     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1611     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1612     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1613     cnt += m;
1614   }
1615   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1616   PetscFunctionReturn(0);
1617 }
1618 
1619 /*MC
1620       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1621                                     used this is good for only small and moderate size problems.
1622 
1623    Options Database Keys:
.   -islocaltoglobalmapping_type basic - select this method
1625 
1626    Level: beginner
1627 
1628 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1629 M*/
1630 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1631 {
1632   PetscFunctionBegin;
1633   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1634   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1635   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1636   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1637   PetscFunctionReturn(0);
1638 }
1639 
1640 /*MC
1641       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1642                                     used this is good for large memory problems.
1643 
1644    Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method
1646 
1647 
1648    Notes: This is selected automatically for large problems if the user does not set the type.
1649 
1650    Level: beginner
1651 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1653 M*/
1654 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1655 {
1656   PetscFunctionBegin;
1657   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1658   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1659   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1660   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1661   PetscFunctionReturn(0);
1662 }
1663 
1664 
1665 /*@C
1666     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1667 
1668    Not Collective
1669 
1670    Input Parameters:
1671 +  sname - name of a new method
1672 -  routine_create - routine to create method context
1673 
1674    Notes:
1675    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1676 
1677    Sample usage:
1678 .vb
1679    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1680 .ve
1681 
1682    Then, your mapping can be chosen with the procedural interface via
1683 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1684    or at runtime via the option
1685 $     -islocaltoglobalmapping_type my_mapper
1686 
1687    Level: advanced
1688 
1689 .keywords: ISLocalToGlobalMappingType, register
1690 
1691 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1692 
1693 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* record the constructor under the given name; ISLocalToGlobalMappingSetType() looks it up later */
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1702 
1703 /*@C
1704    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1705 
1706    Logically Collective on ISLocalToGlobalMapping
1707 
1708    Input Parameters:
1709 +  ltog - the ISLocalToGlobalMapping object
1710 -  type - a known method
1711 
1712    Options Database Key:
1713 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1714     of available methods (for instance, basic or hash)
1715 
1716    Notes:
1717    See "petsc/include/petscis.h" for available methods
1718 
1719   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1720   then set the ISLocalToGlobalMapping type from the options database rather than by using
1721   this routine.
1722 
1723   Level: intermediate
1724 
1725   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1726   are accessed by ISLocalToGlobalMappingSetType().
1727 
1728 .keywords: ISLocalToGlobalMapping, set, method
1729 
1730 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1731 
1732 @*/
1733 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1734 {
1735   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1736   PetscBool      match;
1737 
1738   PetscFunctionBegin;
1739   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1740   PetscValidCharPointer(type,2);
1741 
1742   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1743   if (match) PetscFunctionReturn(0);
1744 
1745   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1746   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1747   /* Destroy the previous private LTOG context */
1748   if (ltog->ops->destroy) {
1749     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1750     ltog->ops->destroy = NULL;
1751   }
1752   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1753   ierr = (*r)(ltog);CHKERRQ(ierr);
1754   PetscFunctionReturn(0);
1755 }
1756 
/* ensures ISLocalToGlobalMappingRegisterAll() registers the built-in types only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1758 
1759 /*@C
1760   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1761 
1762   Not Collective
1763 
1764   Level: advanced
1765 
1766 .keywords: IS, register, all
.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1768 @*/
1769 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1770 {
1771   PetscErrorCode ierr;
1772 
1773   PetscFunctionBegin;
1774   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1775   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1776 
1777   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
1778   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
1779   PetscFunctionReturn(0);
1780 }
1781 
1782