xref: /petsc/src/vec/is/utils/isltog.c (revision 34e9cff27d07519c2d99d1c0a9fe31a7bc1659d3)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hash.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
typedef struct {
  PetscInt    *globals;   /* inverse table: globals[g - globalstart] = local index of global index g, or -1 if g has no local value */
} ISLocalToGlobalMapping_Basic;
13 
typedef struct {
  PetscHashI globalht;    /* hash table mapping a global index to its local index; memory-scalable alternative to the Basic table */
} ISLocalToGlobalMapping_Hash;
17 
18 
19 /* -----------------------------------------------------------------------------------------*/
20 
21 /*
22     Creates the global mapping information in the ISLocalToGlobalMapping structure
23 
24     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
25 */
static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
{
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mapping->data) PetscFunctionReturn(0);    /* already set up */
  end   = 0;                                    /* valid global indices are assumed nonnegative, so 0 is a safe lower seed for the max */
  start = PETSC_MAX_INT;

  /* compute the range [start,end] of the nonnegative global indices; negative entries mark "no global value" and are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}       /* no valid indices at all: normalize to the empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;
  /* if the user did not pick a type, use BASIC (dense array, fast but O(range) memory) for compact
     ranges and HASH (memory-scalable) when the range is much larger than the number of indices */
  if (!((PetscObject)mapping)->type_name) {
    if ((end - start) > PetscMax(4*n,1000000)) {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
    } else {
      ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
    }
  }
  ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
54 
/* Builds the dense inverse table globals[] covering the global range [globalstart,globalend] */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  ierr             = PetscNew(&map);CHKERRQ(ierr);
  /* range has end-start+1 entries; the extra +1 presumably keeps the allocation nonzero for an empty range (end == start-1) — TODO confirm */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  map->globals     = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;   /* -1 marks "no local index for this global index" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;                      /* negative local entries have no global value */
    globals[idx[i] - start] = i;
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
76 
/* Builds the hash-table inverse map (global index -> local index); memory scales with n, not with the global range */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  ierr = PetscNew(&map);CHKERRQ(ierr);
  PetscHashICreate(map->globalht);
  for (i=0; i<n; i++ ) {
    if (idx[i] < 0) continue;                /* negative local entries have no global value */
    PetscHashIAdd(map->globalht, idx[i], i);
  }
  mapping->data = (void*)map;
  /* 2*n is an estimate of the hash-table footprint (key + value per entry) for logging purposes */
  ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
94 
95 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
96 {
97   PetscErrorCode              ierr;
98   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
99 
100   PetscFunctionBegin;
101   if (!map) PetscFunctionReturn(0);
102   ierr = PetscFree(map->globals);CHKERRQ(ierr);
103   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
104   PetscFunctionReturn(0);
105 }
106 
107 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
108 {
109   PetscErrorCode              ierr;
110   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
111 
112   PetscFunctionBegin;
113   if (!map) PetscFunctionReturn(0);
114   PetscHashIDestroy(map->globalht);
115   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
116   PetscFunctionReturn(0);
117 }
118 
119 #define GTOLTYPE _Basic
120 #define GTOLNAME _Basic
121 #define GTOL(g, local) do {                  \
122     local = map->globals[g/bs - start];      \
123     local = bs*local + (g % bs);             \
124   } while (0)
125 #include <../src/vec/is/utils/isltog.h>
126 
127 #define GTOLTYPE _Basic
128 #define GTOLNAME Block_Basic
129 #define GTOL(g, local) do {                  \
130     local = map->globals[g - start];         \
131   } while (0)
132 #include <../src/vec/is/utils/isltog.h>
133 
134 #define GTOLTYPE _Hash
135 #define GTOLNAME _Hash
136 #define GTOL(g, local) do {                  \
137     PetscHashIMap(map->globalht,g/bs,local); \
138     local = bs*local + (g % bs);             \
139    } while (0)
140 #include <../src/vec/is/utils/isltog.h>
141 
142 #define GTOLTYPE _Hash
143 #define GTOLNAME Block_Hash
144 #define GTOL(g, local) do {                  \
145     PetscHashIMap(map->globalht,g,local);    \
146   } while (0)
147 #include <../src/vec/is/utils/isltog.h>
148 
149 /*@
150     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
151 
152     Not Collective
153 
154     Input Parameter:
155 .   ltog - local to global mapping
156 
157     Output Parameter:
158 .   nltog - the duplicated local to global mapping
159 
160     Level: advanced
161 
162     Concepts: mapping^local to global
163 
164 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
165 @*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* ltog->indices holds block indices and ltog->n is the block count, matching what Create() expects;
     the indices are deep-copied so the duplicate is independent of the original */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
176 /*@
177     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
178 
179     Not Collective
180 
181     Input Parameter:
182 .   ltog - local to global mapping
183 
184     Output Parameter:
185 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
186 
187     Level: advanced
188 
189     Concepts: mapping^local to global
190 
191 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
192 @*/
193 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
194 {
195   PetscFunctionBegin;
196   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
197   PetscValidIntPointer(n,2);
198   *n = mapping->bs*mapping->n;
199   PetscFunctionReturn(0);
200 }
201 
202 /*@C
203     ISLocalToGlobalMappingView - View a local to global mapping
204 
205     Not Collective
206 
207     Input Parameters:
208 +   ltog - local to global mapping
209 -   viewer - viewer
210 
211     Level: advanced
212 
213     Concepts: mapping^local to global
214 
215 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
216 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* default to the standard-out viewer on the mapping's communicator */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so each rank's "[rank] local global" lines appear in rank order;
       Push/Flush/Pop must bracket the synchronized prints */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
244 
245 /*@
246     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
247     ordering and a global parallel ordering.
248 
249     Not collective
250 
251     Input Parameter:
252 .   is - index set containing the global numbers for each local number
253 
254     Output Parameter:
255 .   mapping - new mapping data structure
256 
257     Notes: the block size of the IS determines the block size of the mapping
258     Level: advanced
259 
260     Concepts: mapping^local to global
261 
262 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
263 @*/
PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  if (!isblock) {
    /* generic IS: treat each index as its own block (bs = 1) */
    ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  } else {
    /* block IS: inherit its block size; ISGetLocalSize() counted points, so n/bs is the block count,
       and ISBlockGetIndices() returns one (block) index per block as Create() expects */
    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
291 
292 /*@C
293     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
294     ordering and a global parallel ordering.
295 
296     Collective
297 
298     Input Parameter:
299 +   sf - star forest mapping contiguous local indices to (rank, offset)
300 -   start - first global index on this process
301 
302     Output Parameter:
303 .   mapping - new mapping data structure
304 
305     Level: advanced
306 
307     Concepts: mapping^local to global
308 
309 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
310 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* local space is [0,maxlocal): largest explicit leaf index + 1, or simply nleaves when leaves are contiguous (ilocal == NULL) */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;  /* roots are numbered contiguously from start on this process */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;          /* local slots not covered by any leaf remain -1 ("no global value") */
  /* pull each root's global number down to its leaves */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* the mapping takes ownership of ltog; globals was only a staging buffer */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
338 
339 /*@
340     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
341 
342     Not collective
343 
344     Input Parameters:
345 .   mapping - mapping data structure
346 .   bs - the blocksize
347 
348     Level: advanced
349 
350     Concepts: mapping^local to global
351 
352 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
353 @*/
354 PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
355 {
356   PetscInt       *nid;
357   const PetscInt *oid;
358   PetscInt       i,cn,on,obs,nn;
359   PetscErrorCode ierr;
360 
361   PetscFunctionBegin;
362   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
363   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
364   if (bs == mapping->bs) PetscFunctionReturn(0);
365   on  = mapping->n;
366   obs = mapping->bs;
367   oid = mapping->indices;
368   nn  = (on*obs)/bs;
369   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
370 
371   ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
372   ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
373   for (i=0;i<nn;i++) {
374     PetscInt j;
375     for (j=0,cn=0;j<bs-1;j++) {
376       if (oid[i*bs+j] < 0) { cn++; continue; }
377       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
378     }
379     if (oid[i*bs+j] < 0) cn++;
380     if (cn) {
381       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
382       nid[i] = -1;
383     } else {
384       nid[i] = oid[i*bs]/bs;
385     }
386   }
387   ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);
388 
389   mapping->n           = nn;
390   mapping->bs          = bs;
391   ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
392   mapping->indices     = nid;
393   mapping->globalstart = 0;
394   mapping->globalend   = 0;
395   if (mapping->ops->destroy) {
396     ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
397   }
398   PetscFunctionReturn(0);
399 }
400 
401 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
404 
405     Not Collective
406 
407     Input Parameters:
408 .   mapping - mapping data structure
409 
410     Output Parameter:
411 .   bs - the blocksize
412 
413     Level: advanced
414 
415     Concepts: mapping^local to global
416 
417 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
418 @*/
419 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
420 {
421   PetscFunctionBegin;
422   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
423   *bs = mapping->bs;
424   PetscFunctionReturn(0);
425 }
426 
427 /*@
428     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
429     ordering and a global parallel ordering.
430 
431     Not Collective, but communicator may have more than one process
432 
433     Input Parameters:
434 +   comm - MPI communicator
435 .   bs - the block size
436 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
437 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
438 -   mode - see PetscCopyMode
439 
440     Output Parameter:
441 .   mapping - new mapping data structure
442 
443     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
444 
445     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
446     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
447     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
448 
449     Level: advanced
450 
451     Concepts: mapping^local to global
452 
453 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
454           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
455 @*/
456 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
457 {
458   PetscErrorCode ierr;
459   PetscInt       *in;
460 
461   PetscFunctionBegin;
462   if (n) PetscValidIntPointer(indices,3);
463   PetscValidPointer(mapping,4);
464 
465   *mapping = NULL;
466   ierr = ISInitializePackage();CHKERRQ(ierr);
467 
468   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
469                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
470   (*mapping)->n             = n;
471   (*mapping)->bs            = bs;
472   (*mapping)->info_cached   = PETSC_FALSE;
473   (*mapping)->info_free     = PETSC_FALSE;
474   (*mapping)->info_procs    = NULL;
475   (*mapping)->info_numprocs = NULL;
476   (*mapping)->info_indices  = NULL;
477 
478   (*mapping)->ops->globaltolocalmappingapply      = NULL;
479   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
480   (*mapping)->ops->destroy                        = NULL;
481   if (mode == PETSC_COPY_VALUES) {
482     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
483     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
484     (*mapping)->indices = in;
485     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
486   } else if (mode == PETSC_OWN_POINTER) {
487     (*mapping)->indices = (PetscInt*)indices;
488     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
489   }
490   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
491   PetscFunctionReturn(0);
492 }
493 
494 PetscFunctionList ISLocalToGlobalMappingList = NULL;
495 
496 
497 /*@
498    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
499 
500    Not collective
501 
502    Input Parameters:
503 .  mapping - mapping data structure
504 
505    Level: advanced
506 
507 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";   /* placeholder shown when no type has been chosen yet */
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* offer the current type (if set) as the default in the options listing */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
526 
527 /*@
528    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
529    ordering and a global parallel ordering.
530 
   Not Collective
532 
533    Input Parameters:
534 .  mapping - mapping data structure
535 
536    Level: advanced
537 
538 .seealso: ISLocalToGlobalMappingCreate()
539 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* another reference is still live: just drop ours and null the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed unconditionally before the loop starts at 1 — presumably entry 0 is always
       allocated when info_indices exists; verify against the GetInfo allocation code */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* let the type (basic/hash) release its global-to-local data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
567 
568 /*@
569     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
570     a new index set using the global numbering defined in an ISLocalToGlobalMapping
571     context.
572 
573     Not collective
574 
575     Input Parameters:
576 +   mapping - mapping between local and global numbering
577 -   is - index set in local numbering
578 
579     Output Parameters:
580 .   newis - index set in global numbering
581 
582     Level: advanced
583 
584     Concepts: mapping^local to global
585 
586 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
587           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
588 @*/
589 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
590 {
591   PetscErrorCode ierr;
592   PetscInt       n,*idxout;
593   const PetscInt *idxin;
594 
595   PetscFunctionBegin;
596   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
597   PetscValidHeaderSpecific(is,IS_CLASSID,2);
598   PetscValidPointer(newis,3);
599 
600   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
601   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
602   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
603   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
604   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
605   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
606   PetscFunctionReturn(0);
607 }
608 
609 /*@
610    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
611    and converts them to the global numbering.
612 
613    Not collective
614 
615    Input Parameters:
616 +  mapping - the local to global mapping context
617 .  N - number of integers
618 -  in - input indices in local numbering
619 
620    Output Parameter:
621 .  out - indices in global numbering
622 
623    Notes:
624    The in and out array parameters may be identical.
625 
626    Level: advanced
627 
628 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
629           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
630           AOPetscToApplication(), ISGlobalToLocalMappingApply()
631 
632     Concepts: mapping^local to global
633 @*/
634 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
635 {
636   PetscInt i,bs,Nmax;
637 
638   PetscFunctionBegin;
639   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
640   bs   = mapping->bs;
641   Nmax = bs*mapping->n;
642   if (bs == 1) {
643     const PetscInt *idx = mapping->indices;
644     for (i=0; i<N; i++) {
645       if (in[i] < 0) {
646         out[i] = in[i];
647         continue;
648       }
649       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
650       out[i] = idx[in[i]];
651     }
652   } else {
653     const PetscInt *idx = mapping->indices;
654     for (i=0; i<N; i++) {
655       if (in[i] < 0) {
656         out[i] = in[i];
657         continue;
658       }
659       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
660       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
661     }
662   }
663   PetscFunctionReturn(0);
664 }
665 
666 /*@
667    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
668 
669    Not collective
670 
671    Input Parameters:
672 +  mapping - the local to global mapping context
673 .  N - number of integers
674 -  in - input indices in local block numbering
675 
676    Output Parameter:
677 .  out - indices in global block numbering
678 
679    Notes:
680    The in and out array parameters may be identical.
681 
682    Example:
683      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
684      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
685 
686    Level: advanced
687 
688 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
689           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
690           AOPetscToApplication(), ISGlobalToLocalMappingApply()
691 
692     Concepts: mapping^local to global
693 @*/
694 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
695 {
696 
697   PetscFunctionBegin;
698   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
699   {
700     PetscInt i,Nmax = mapping->n;
701     const PetscInt *idx = mapping->indices;
702 
703     for (i=0; i<N; i++) {
704       if (in[i] < 0) {
705         out[i] = in[i];
706         continue;
707       }
708       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
709       out[i] = idx[in[i]];
710     }
711   }
712   PetscFunctionReturn(0);
713 }
714 
715 /*@
716     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
717     specified with a global numbering.
718 
719     Not collective
720 
721     Input Parameters:
722 +   mapping - mapping between local and global numbering
723 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
724            IS_GTOLM_DROP - drops the indices with no local value from the output list
725 .   n - number of global indices to map
726 -   idx - global indices to map
727 
728     Output Parameters:
729 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
730 -   idxout - local index of each global index, one must pass in an array long enough
731              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
732              idxout == NULL to determine the required length (returned in nout)
733              and then allocate the required space and call ISGlobalToLocalMappingApply()
734              a second time to set the values.
735 
736     Notes:
737     Either nout or idxout may be NULL. idx and idxout may be identical.
738 
739     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
740     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
741     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
742 
743     Level: advanced
744 
745     Developer Note: The manual page states that idx and idxout may be identical but the calling
746        sequence declares idx as const so it cannot be the same as idxout.
747 
748     Concepts: mapping^global to local
749 
750 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
751           ISLocalToGlobalMappingDestroy()
752 @*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the inverse map (and pick basic/hash) on first use */
  if (!mapping->data) {
    ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
  }
  ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
765 
766 /*@
767     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
768     a new index set using the local numbering defined in an ISLocalToGlobalMapping
769     context.
770 
771     Not collective
772 
773     Input Parameters:
774 +   mapping - mapping between local and global numbering
775 -   is - index set in global numbering
776 
777     Output Parameters:
778 .   newis - index set in local numbering
779 
780     Level: advanced
781 
782     Concepts: mapping^local to global
783 
784 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
785           ISLocalToGlobalMappingDestroy()
786 @*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* mask mode produces exactly one output per input, so the length is known up front */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* drop mode: first call with idxout == NULL only counts how many indices survive */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
811 
812 /*@
813     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
814     specified with a block global numbering.
815 
816     Not collective
817 
818     Input Parameters:
819 +   mapping - mapping between local and global numbering
820 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
821            IS_GTOLM_DROP - drops the indices with no local value from the output list
822 .   n - number of global indices to map
823 -   idx - global indices to map
824 
825     Output Parameters:
826 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
827 -   idxout - local index of each global index, one must pass in an array long enough
828              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
829              idxout == NULL to determine the required length (returned in nout)
830              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
831              a second time to set the values.
832 
833     Notes:
834     Either nout or idxout may be NULL. idx and idxout may be identical.
835 
836     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
837     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
838     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
839 
840     Level: advanced
841 
842     Developer Note: The manual page states that idx and idxout may be identical but the calling
843        sequence declares idx as const so it cannot be the same as idxout.
844 
845     Concepts: mapping^global to local
846 
847 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
848           ISLocalToGlobalMappingDestroy()
849 @*/
850 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
851                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
852 {
853   PetscErrorCode ierr;
854 
855   PetscFunctionBegin;
856   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
857   if (!mapping->data) {
858     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
859   }
860   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
864 
865 /*@C
866     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
867      each index shared by more than one processor
868 
869     Collective on ISLocalToGlobalMapping
870 
871     Input Parameters:
872 .   mapping - the mapping from local to global indexing
873 
874     Output Parameter:
875 +   nproc - number of processors that are connected to this one
876 .   proc - neighboring processors
877 .   numproc - number of indices for each subdomain (processor)
878 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
879 
880     Level: advanced
881 
882     Concepts: mapping^local to global
883 
884     Fortran Usage:
885 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
886 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
887           PetscInt indices[nproc][numprocmax],ierr)
888         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
889         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
890 
891 
892 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
893           ISLocalToGlobalMappingRestoreInfo()
894 @*/
895 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
896 {
897   PetscErrorCode ierr;
898 
899   PetscFunctionBegin;
900   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
901   if (mapping->info_cached) {
902     *nproc    = mapping->info_nproc;
903     *procs    = mapping->info_procs;
904     *numprocs = mapping->info_numprocs;
905     *indices  = mapping->info_indices;
906   } else {
907     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
908   }
909   PetscFunctionReturn(0);
910 }
911 
/*
   Computes, for this rank, the set of neighboring ranks with which it shares at
   least one (block) index, and for each such neighbor the list of shared local
   indices (sorted by global number).  The result is cached on the mapping
   (info_cached) so later calls through ISLocalToGlobalMappingGetBlockInfo()
   are cheap; the mapping retains ownership of the returned arrays.

   Communication pattern (three rounds, one fresh tag each):
     tag1: every rank sends each of its local indices to the rank that
           "globally owns" it under a uniform partition of [0,Ng) into
           chunks of size `scale`;
     tag2: each global owner sends back the length of its reply message;
     tag3: the owners send back, for every shared index, the list of ranks
           sharing it, from which the per-neighbor index lists are assembled.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* sequential case: no neighbors; still allocate one-entry arrays so callers can index [0] */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = one more than the largest global index appearing on any rank */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* uniform partition of [0,Ng): rank r globally owns [r*scale, (r+1)*scale), last rank takes the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message is at most 2*nmax entries: (global index, local position) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[], which was consumed by the packing loop above */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was consumed above; rebuild it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* round 2/3: each rank that received data in round 1 now replies to the same senders */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* displacements of the variable-length round-3 messages inside recvs2 */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  /* allocate the output arrays; bprocs maps a rank to its slot in the compressed neighbor list */
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1321 
1322 /*@C
1323     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1324 
1325     Collective on ISLocalToGlobalMapping
1326 
1327     Input Parameters:
1328 .   mapping - the mapping from local to global indexing
1329 
1330     Output Parameter:
1331 +   nproc - number of processors that are connected to this one
1332 .   proc - neighboring processors
1333 .   numproc - number of indices for each processor
1334 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1335 
1336     Level: advanced
1337 
1338 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetBlockInfo()
1340 @*/
1341 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1342 {
1343   PetscErrorCode ierr;
1344 
1345   PetscFunctionBegin;
1346   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1347   if (mapping->info_free) {
1348     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1349     if (*indices) {
1350       PetscInt i;
1351 
1352       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1353       for (i=1; i<*nproc; i++) {
1354         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1355       }
1356       ierr = PetscFree(*indices);CHKERRQ(ierr);
1357     }
1358   }
1359   *nproc    = 0;
1360   *procs    = NULL;
1361   *numprocs = NULL;
1362   *indices  = NULL;
1363   PetscFunctionReturn(0);
1364 }
1365 
1366 /*@C
1367     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1368      each index shared by more than one processor
1369 
1370     Collective on ISLocalToGlobalMapping
1371 
1372     Input Parameters:
1373 .   mapping - the mapping from local to global indexing
1374 
1375     Output Parameter:
1376 +   nproc - number of processors that are connected to this one
1377 .   proc - neighboring processors
1378 .   numproc - number of indices for each subdomain (processor)
1379 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1380 
1381     Level: advanced
1382 
1383     Concepts: mapping^local to global
1384 
1385     Fortran Usage:
1386 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1387 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1388           PetscInt indices[nproc][numprocmax],ierr)
1389         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1390         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1391 
1392 
1393 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1394           ISLocalToGlobalMappingRestoreInfo()
1395 @*/
1396 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1397 {
1398   PetscErrorCode ierr;
1399   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1400 
1401   PetscFunctionBegin;
1402   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1403   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1404   if (bs > 1) { /* we need to expand the cached info */
1405     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1406     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1407     for (i=0; i<*nproc; i++) {
1408       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1409       for (j=0; j<bnumprocs[i]; j++) {
1410         for (k=0; k<bs; k++) {
1411           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1412         }
1413       }
1414       (*numprocs)[i] = bnumprocs[i]*bs;
1415     }
1416     mapping->info_free = PETSC_TRUE;
1417   } else {
1418     *numprocs = bnumprocs;
1419     *indices  = bindices;
1420   }
1421   PetscFunctionReturn(0);
1422 }
1423 
1424 /*@C
1425     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1426 
1427     Collective on ISLocalToGlobalMapping
1428 
1429     Input Parameters:
1430 .   mapping - the mapping from local to global indexing
1431 
1432     Output Parameter:
1433 +   nproc - number of processors that are connected to this one
1434 .   proc - neighboring processors
1435 .   numproc - number of indices for each processor
1436 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1437 
1438     Level: advanced
1439 
1440 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1441           ISLocalToGlobalMappingGetInfo()
1442 @*/
1443 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1444 {
1445   PetscErrorCode ierr;
1446 
1447   PetscFunctionBegin;
1448   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 /*@C
1453    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1454 
1455    Not Collective
1456 
1457    Input Arguments:
1458 . ltog - local to global mapping
1459 
1460    Output Arguments:
1461 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1462 
1463    Level: advanced
1464 
1465    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1466 
1467 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1468 @*/
1469 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1470 {
1471   PetscFunctionBegin;
1472   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1473   PetscValidPointer(array,2);
1474   if (ltog->bs == 1) {
1475     *array = ltog->indices;
1476   } else {
1477     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1478     const PetscInt *ii;
1479     PetscErrorCode ierr;
1480 
1481     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1482     *array = jj;
1483     k    = 0;
1484     ii   = ltog->indices;
1485     for (i=0; i<n; i++)
1486       for (j=0; j<bs; j++)
1487         jj[k++] = bs*ii[i] + j;
1488   }
1489   PetscFunctionReturn(0);
1490 }
1491 
1492 /*@C
1493    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1494 
1495    Not Collective
1496 
1497    Input Arguments:
1498 + ltog - local to global mapping
1499 - array - array of indices
1500 
1501    Level: advanced
1502 
1503 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1504 @*/
1505 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1506 {
1507   PetscFunctionBegin;
1508   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1509   PetscValidPointer(array,2);
1510   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1511 
1512   if (ltog->bs > 1) {
1513     PetscErrorCode ierr;
1514     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1515   }
1516   PetscFunctionReturn(0);
1517 }
1518 
1519 /*@C
1520    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1521 
1522    Not Collective
1523 
1524    Input Arguments:
1525 . ltog - local to global mapping
1526 
1527    Output Arguments:
1528 . array - array of indices
1529 
1530    Level: advanced
1531 
1532 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1533 @*/
1534 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1535 {
1536   PetscFunctionBegin;
1537   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1538   PetscValidPointer(array,2);
1539   *array = ltog->indices;
1540   PetscFunctionReturn(0);
1541 }
1542 
1543 /*@C
1544    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1545 
1546    Not Collective
1547 
1548    Input Arguments:
1549 + ltog - local to global mapping
1550 - array - array of indices
1551 
1552    Level: advanced
1553 
1554 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1555 @*/
1556 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1557 {
1558   PetscFunctionBegin;
1559   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1560   PetscValidPointer(array,2);
1561   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1562   *array = NULL;
1563   PetscFunctionReturn(0);
1564 }
1565 
1566 /*@C
1567    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1568 
1569    Not Collective
1570 
1571    Input Arguments:
1572 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1573 . n - number of mappings to concatenate
1574 - ltogs - local to global mappings
1575 
1576    Output Arguments:
1577 . ltogcat - new mapping
1578 
1579    Note: this currently always returns a mapping with block size of 1
1580 
1581    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1582 
1583    Level: advanced
1584 
1585 .seealso: ISLocalToGlobalMappingCreate()
1586 @*/
1587 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1588 {
1589   PetscInt       i,cnt,m,*idx;
1590   PetscErrorCode ierr;
1591 
1592   PetscFunctionBegin;
1593   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1594   if (n > 0) PetscValidPointer(ltogs,3);
1595   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1596   PetscValidPointer(ltogcat,4);
1597   for (cnt=0,i=0; i<n; i++) {
1598     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1599     cnt += m;
1600   }
1601   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1602   for (cnt=0,i=0; i<n; i++) {
1603     const PetscInt *subidx;
1604     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1605     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1606     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1607     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1608     cnt += m;
1609   }
1610   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1611   PetscFunctionReturn(0);
1612 }
1613 
1614 /*MC
1615       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1616                                     used this is good for only small and moderate size problems.
1617 
1618    Options Database Keys:
1619 +   -islocaltoglobalmapping_type basic - select this method
1620 
1621    Level: beginner
1622 
1623 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1624 M*/
1625 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1626 {
1627   PetscFunctionBegin;
1628   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1629   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1630   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1631   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1632   PetscFunctionReturn(0);
1633 }
1634 
1635 /*MC
1636       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1637                                     used this is good for large memory problems.
1638 
1639    Options Database Keys:
1640 +   -islocaltoglobalmapping_type hash - select this method
1641 
1642 
1643    Notes: This is selected automatically for large problems if the user does not set the type.
1644 
1645    Level: beginner
1646 
1647 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1648 M*/
1649 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1650 {
1651   PetscFunctionBegin;
1652   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1653   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1654   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1655   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1656   PetscFunctionReturn(0);
1657 }
1658 
1659 
1660 /*@C
1661     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1662 
1663    Not Collective
1664 
1665    Input Parameters:
1666 +  sname - name of a new method
1667 -  routine_create - routine to create method context
1668 
1669    Notes:
1670    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1671 
1672    Sample usage:
1673 .vb
1674    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1675 .ve
1676 
1677    Then, your mapping can be chosen with the procedural interface via
1678 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1679    or at runtime via the option
1680 $     -islocaltoglobalmapping_type my_mapper
1681 
1682    Level: advanced
1683 
1684 .keywords: ISLocalToGlobalMappingType, register
1685 
1686 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1687 
1688 @*/
1689 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1690 {
1691   PetscErrorCode ierr;
1692 
1693   PetscFunctionBegin;
1694   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1695   PetscFunctionReturn(0);
1696 }
1697 
1698 /*@C
1699    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1700 
1701    Logically Collective on ISLocalToGlobalMapping
1702 
1703    Input Parameters:
1704 +  ltog - the ISLocalToGlobalMapping object
1705 -  type - a known method
1706 
1707    Options Database Key:
1708 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1709     of available methods (for instance, basic or hash)
1710 
1711    Notes:
1712    See "petsc/include/petscis.h" for available methods
1713 
1714   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1715   then set the ISLocalToGlobalMapping type from the options database rather than by using
1716   this routine.
1717 
1718   Level: intermediate
1719 
1720   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1721   are accessed by ISLocalToGlobalMappingSetType().
1722 
1723 .keywords: ISLocalToGlobalMapping, set, method
1724 
1725 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1726 
1727 @*/
1728 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1729 {
1730   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1731   PetscBool      match;
1732 
1733   PetscFunctionBegin;
1734   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1735   PetscValidCharPointer(type,2);
1736 
1737   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1738   if (match) PetscFunctionReturn(0);
1739 
1740   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1741   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1742   /* Destroy the previous private LTOG context */
1743   if (ltog->ops->destroy) {
1744     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1745     ltog->ops->destroy = NULL;
1746   }
1747   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1748   ierr = (*r)(ltog);CHKERRQ(ierr);
1749   PetscFunctionReturn(0);
1750 }
1751 
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; /* set once ISLocalToGlobalMappingRegisterAll() has run, so registration happens only once */
1753 
/*@C
  ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

  Not Collective

  Level: advanced

.keywords: IS, register, all
.seealso:  ISRegister(),  ISLocalToGlobalRegister()
@*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* run the registrations at most once per program execution */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  /* the two built-in implementations: direct-lookup table and hash table */
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1776 
1777