xref: /petsc/src/vec/is/utils/isltog.c (revision 609bdbee21ea3be08735c64dbe00a9ab27759925)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 typedef struct {
10   PetscInt    *globals;
11 } ISLocalToGlobalMapping_Basic;
12 
13 typedef struct {
14   PetscHashI globalht;
15 } ISLocalToGlobalMapping_Hash;
16 
17 
18 /* -----------------------------------------------------------------------------------------*/
19 
20 /*
21     Creates the global mapping information in the ISLocalToGlobalMapping structure
22 
23     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
24 */
25 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
26 {
27   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
28   PetscErrorCode ierr;
29 
30   PetscFunctionBegin;
31   if (mapping->data) PetscFunctionReturn(0);
32   end   = 0;
33   start = PETSC_MAX_INT;
34 
35   for (i=0; i<n; i++) {
36     if (idx[i] < 0) continue;
37     if (idx[i] < start) start = idx[i];
38     if (idx[i] > end)   end   = idx[i];
39   }
40   if (start > end) {start = 0; end = -1;}
41   mapping->globalstart = start;
42   mapping->globalend   = end;
43   if (!((PetscObject)mapping)->type_name) {
44     if ((end - start) > PetscMax(4*n,1000000)) {
45       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);
46     } else {
47       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);
48     }
49   }
50   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
51   PetscFunctionReturn(0);
52 }
53 
/* Build the "basic" global-to-local structure: a dense lookup table indexed by
   global index relative to mapping->globalstart, holding the local index or -1. */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
  ISLocalToGlobalMapping_Basic *map;

  PetscFunctionBegin;
  start            = mapping->globalstart;
  end              = mapping->globalend;
  ierr             = PetscNew(&map);CHKERRQ(ierr);
  /* +2 (rather than +1) keeps the allocation size positive when the range is
     empty (end == start-1, set by ISGlobalToLocalMappingSetUp) */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  map->globals     = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1; /* -1 = global index has no local counterpart */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue; /* negative global indices are unmapped; leave table entry at -1 */
    globals[idx[i] - start] = i;
  }
  mapping->data = (void*)map;
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
75 
/* Build the "hash" global-to-local structure: a hash table keyed by global
   index with the local index as value.  Memory scales with n rather than with
   the span of the global indices, so this is the scalable variant. */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  PetscInt                    i,*idx = mapping->indices,n = mapping->n;
  ISLocalToGlobalMapping_Hash *map;

  PetscFunctionBegin;
  ierr = PetscNew(&map);CHKERRQ(ierr);
  PetscHashICreate(map->globalht);
  for (i=0; i<n; i++ ) {
    if (idx[i] < 0) continue; /* negative global indices have no local counterpart */
    PetscHashIAdd(map->globalht, idx[i], i);
  }
  mapping->data = (void*)map;
  /* rough memory estimate: one key plus one value per mapped entry */
  ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
93 
94 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
95 {
96   PetscErrorCode              ierr;
97   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
98 
99   PetscFunctionBegin;
100   if (!map) PetscFunctionReturn(0);
101   ierr = PetscFree(map->globals);CHKERRQ(ierr);
102   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
103   PetscFunctionReturn(0);
104 }
105 
106 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
107 {
108   PetscErrorCode              ierr;
109   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
110 
111   PetscFunctionBegin;
112   if (!map) PetscFunctionReturn(0);
113   PetscHashIDestroy(map->globalht);
114   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
115   PetscFunctionReturn(0);
116 }
117 
/*
   Instantiate the four global-to-local apply variants from the template in
   isltog.h.  Each inclusion generates one function: GTOLTYPE selects the data
   structure suffix (_Basic dense table or _Hash hash table), GTOLNAME the
   generated function-name suffix, and GTOL(g,local) the per-index lookup.
   NOTE(review): the macros are redefined before every inclusion, so the
   template presumably #undefs them after use -- confirm against isltog.h.
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
/* point (non-block) lookup: index the table by block, then restore the offset within the block */
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
/* block lookup: direct table access */
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
/* point lookup via the hash table, then restore the offset within the block */
#define GTOL(g, local) do {                  \
    PetscHashIMap(map->globalht,g/bs,local); \
    local = bs*local + (g % bs);             \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
/* block lookup via the hash table */
#define GTOL(g, local) do {                  \
    PetscHashIMap(map->globalht,g,local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
147 
148 /*@
149     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
150 
151     Not Collective
152 
153     Input Parameter:
154 .   ltog - local to global mapping
155 
156     Output Parameter:
157 .   nltog - the duplicated local to global mapping
158 
159     Level: advanced
160 
161     Concepts: mapping^local to global
162 
163 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
164 @*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* A deep copy of the block indices suffices; cached global-to-local data is rebuilt lazily.
     NOTE(review): the type (basic/hash) of ltog is not propagated to the duplicate -- confirm intended. */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
174 
175 /*@
176     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
177 
178     Not Collective
179 
180     Input Parameter:
181 .   ltog - local to global mapping
182 
183     Output Parameter:
184 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
185 
186     Level: advanced
187 
188     Concepts: mapping^local to global
189 
190 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
191 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks; report the size in individual (unblocked) indices */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
200 
201 /*@C
202     ISLocalToGlobalMappingView - View a local to global mapping
203 
204     Not Collective
205 
206     Input Parameters:
207 +   ltog - local to global mapping
208 -   viewer - viewer
209 
210     Level: advanced
211 
212     Concepts: mapping^local to global
213 
214 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
215 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* default to stdout on the mapping's communicator */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  /* only ASCII viewers are handled; other viewer types are silently ignored */
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    /* one "[rank] local global" line per block index, printed rank by rank */
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
243 
244 /*@
245     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
246     ordering and a global parallel ordering.
247 
248     Not collective
249 
250     Input Parameter:
251 .   is - index set containing the global numbers for each local number
252 
253     Output Parameter:
254 .   mapping - new mapping data structure
255 
256     Notes: the block size of the IS determines the block size of the mapping
257     Level: advanced
258 
259     Concepts: mapping^local to global
260 
261 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
262 @*/
PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  if (!isblock) {
    /* general IS: one index per entry, block size 1 */
    ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  } else {
    /* block IS: use the block indices directly; n counts individual indices,
       hence n/bs block indices are passed on */
    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
290 
291 /*@C
292     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
293     ordering and a global parallel ordering.
294 
295     Collective
296 
297     Input Parameter:
298 +   sf - star forest mapping contiguous local indices to (rank, offset)
299 -   start - first global index on this process
300 
301     Output Parameter:
302 .   mapping - new mapping data structure
303 
304     Level: advanced
305 
306     Concepts: mapping^local to global
307 
308 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
309 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* maxlocal = one past the largest leaf slot; when ilocal is given the leaves
     may be sparse, otherwise they are the contiguous range [0,nleaves) */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i; /* owned roots numbered contiguously from start */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;         /* local slots without a leaf map to -1 */
  /* pull each leaf's global root number into its local slot */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* the new mapping takes ownership of ltog; globals is only a scratch buffer */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
337 
338 /*@
339     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
340 
341     Not collective
342 
343     Input Parameters:
344 .   mapping - mapping data structure
345 .   bs - the blocksize
346 
347     Level: advanced
348 
349     Concepts: mapping^local to global
350 
351 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
352 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0); /* nothing to do */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs; /* number of blocks under the new block size */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* work on the expanded (unblocked) index list */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* each new block must consist of consecutive global indices; cn counts negative
       (unmapped) entries inside the block */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++; /* also account for the last entry of the block (j == bs-1 here) */
    if (cn) {
      /* a block may be entirely negative (unmapped) but never mixed */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs; /* new block index taken from the block's first entry */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  /* drop any cached global-to-local data; it is rebuilt lazily for the new block size */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
399 
400 /*@
401     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
402     ordering and a global parallel ordering.
403 
404     Not Collective
405 
406     Input Parameters:
407 .   mapping - mapping data structure
408 
409     Output Parameter:
410 .   bs - the blocksize
411 
412     Level: advanced
413 
414     Concepts: mapping^local to global
415 
416 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
417 @*/
418 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
419 {
420   PetscFunctionBegin;
421   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
422   *bs = mapping->bs;
423   PetscFunctionReturn(0);
424 }
425 
426 /*@
427     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
428     ordering and a global parallel ordering.
429 
430     Not Collective, but communicator may have more than one process
431 
432     Input Parameters:
433 +   comm - MPI communicator
434 .   bs - the block size
435 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
436 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
437 -   mode - see PetscCopyMode
438 
439     Output Parameter:
440 .   mapping - new mapping data structure
441 
442     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
443 
444     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
445     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
446     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
447 
448     Level: advanced
449 
450     Concepts: mapping^local to global
451 
452 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
453           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
454 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3); /* indices may be NULL only for an empty mapping */
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;   /* number of block indices */
  (*mapping)->bs            = bs;
  /* neighbor/shared-index info is computed lazily and cached; start empty */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;

  /* global-to-local support is type-specific and installed when a type is set */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array (freed in ISLocalToGlobalMappingDestroy) */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
492 
493 PetscFunctionList ISLocalToGlobalMappingList = NULL;
494 
495 
496 /*@
497    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
498 
499    Not collective
500 
501    Input Parameters:
502 .  mapping - mapping data structure
503 
504    Level: advanced
505 
506 @*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set"; /* placeholder shown when no type has been set yet */
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* offer the current type (if any) as the default in the option listing */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
525 
526 /*@
527    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
528    ordering and a global parallel ordering.
529 
530    Not Collective
531 
532    Input Parameters:
533 .  mapping - mapping data structure
534 
535    Level: advanced
536 
537 .seealso: ISLocalToGlobalMappingCreate()
538 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* NOTE(review): entry 0 is freed unconditionally before the loop --
       presumably info_indices always has at least one entry when non-NULL; confirm */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* type-specific cleanup (basic table or hash) of the global-to-local data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
566 
567 /*@
568     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
569     a new index set using the global numbering defined in an ISLocalToGlobalMapping
570     context.
571 
572     Not collective
573 
574     Input Parameters:
575 +   mapping - mapping between local and global numbering
576 -   is - index set in local numbering
577 
578     Output Parameters:
579 .   newis - index set in global numbering
580 
581     Level: advanced
582 
583     Concepts: mapping^local to global
584 
585 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
586           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
587 @*/
PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);
  PetscValidPointer(newis,3);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout, so no free here */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
607 
608 /*@
609    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
610    and converts them to the global numbering.
611 
612    Not collective
613 
614    Input Parameters:
615 +  mapping - the local to global mapping context
616 .  N - number of integers
617 -  in - input indices in local numbering
618 
619    Output Parameter:
620 .  out - indices in global numbering
621 
622    Notes:
623    The in and out array parameters may be identical.
624 
625    Level: advanced
626 
627 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
628           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
629           AOPetscToApplication(), ISGlobalToLocalMappingApply()
630 
631     Concepts: mapping^local to global
632 @*/
633 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
634 {
635   PetscInt i,bs,Nmax;
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
639   bs   = mapping->bs;
640   Nmax = bs*mapping->n;
641   if (bs == 1) {
642     const PetscInt *idx = mapping->indices;
643     for (i=0; i<N; i++) {
644       if (in[i] < 0) {
645         out[i] = in[i];
646         continue;
647       }
648       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
649       out[i] = idx[in[i]];
650     }
651   } else {
652     const PetscInt *idx = mapping->indices;
653     for (i=0; i<N; i++) {
654       if (in[i] < 0) {
655         out[i] = in[i];
656         continue;
657       }
658       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
659       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
660     }
661   }
662   PetscFunctionReturn(0);
663 }
664 
665 /*@
666    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
667 
668    Not collective
669 
670    Input Parameters:
671 +  mapping - the local to global mapping context
672 .  N - number of integers
673 -  in - input indices in local block numbering
674 
675    Output Parameter:
676 .  out - indices in global block numbering
677 
678    Notes:
679    The in and out array parameters may be identical.
680 
681    Example:
682      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
683      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
684 
685    Level: advanced
686 
687 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
688           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
689           AOPetscToApplication(), ISGlobalToLocalMappingApply()
690 
691     Concepts: mapping^local to global
692 @*/
693 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
694 {
695 
696   PetscFunctionBegin;
697   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
698   {
699     PetscInt i,Nmax = mapping->n;
700     const PetscInt *idx = mapping->indices;
701 
702     for (i=0; i<N; i++) {
703       if (in[i] < 0) {
704         out[i] = in[i];
705         continue;
706       }
707       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
708       out[i] = idx[in[i]];
709     }
710   }
711   PetscFunctionReturn(0);
712 }
713 
714 /*@
715     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
716     specified with a global numbering.
717 
718     Not collective
719 
720     Input Parameters:
721 +   mapping - mapping between local and global numbering
722 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
723            IS_GTOLM_DROP - drops the indices with no local value from the output list
724 .   n - number of global indices to map
725 -   idx - global indices to map
726 
727     Output Parameters:
728 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
729 -   idxout - local index of each global index, one must pass in an array long enough
730              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
731              idxout == NULL to determine the required length (returned in nout)
732              and then allocate the required space and call ISGlobalToLocalMappingApply()
733              a second time to set the values.
734 
735     Notes:
736     Either nout or idxout may be NULL. idx and idxout may be identical.
737 
738     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
739     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
740     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
741 
742     Level: advanced
743 
744     Developer Note: The manual page states that idx and idxout may be identical but the calling
745        sequence declares idx as const so it cannot be the same as idxout.
746 
747     Concepts: mapping^global to local
748 
749 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
750           ISLocalToGlobalMappingDestroy()
751 @*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the global-to-local data structure (basic table or hash) on first use */
  if (!mapping->data) {
    ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
  }
  ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
764 
765 /*@
766     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
767     a new index set using the local numbering defined in an ISLocalToGlobalMapping
768     context.
769 
770     Not collective
771 
772     Input Parameters:
773 +   mapping - mapping between local and global numbering
774 -   is - index set in global numbering
775 
776     Output Parameters:
777 .   newis - index set in local numbering
778 
779     Level: advanced
780 
781     Concepts: mapping^local to global
782 
783 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
784           ISLocalToGlobalMappingDestroy()
785 @*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* MASK produces one output per input, so the output length is known up front */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* DROP: first call with NULL output just counts the mappable indices */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
810 
811 /*@
812     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
813     specified with a block global numbering.
814 
815     Not collective
816 
817     Input Parameters:
818 +   mapping - mapping between local and global numbering
819 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
820            IS_GTOLM_DROP - drops the indices with no local value from the output list
821 .   n - number of global indices to map
822 -   idx - global indices to map
823 
824     Output Parameters:
825 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
826 -   idxout - local index of each global index, one must pass in an array long enough
827              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
828              idxout == NULL to determine the required length (returned in nout)
829              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
830              a second time to set the values.
831 
832     Notes:
833     Either nout or idxout may be NULL. idx and idxout may be identical.
834 
    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
836     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
837     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
838 
839     Level: advanced
840 
841     Developer Note: The manual page states that idx and idxout may be identical but the calling
842        sequence declares idx as const so it cannot be the same as idxout.
843 
844     Concepts: mapping^global to local
845 
846 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
847           ISLocalToGlobalMappingDestroy()
848 @*/
849 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
850                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
851 {
852   PetscErrorCode ierr;
853 
854   PetscFunctionBegin;
855   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
856   if (!mapping->data) {
857     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
858   }
859   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 
864 /*@C
865     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
866      each index shared by more than one processor
867 
868     Collective on ISLocalToGlobalMapping
869 
870     Input Parameters:
871 .   mapping - the mapping from local to global indexing
872 
    Output Parameters:
874 +   nproc - number of processors that are connected to this one
875 .   proc - neighboring processors
876 .   numproc - number of indices for each subdomain (processor)
877 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
878 
879     Level: advanced
880 
881     Concepts: mapping^local to global
882 
883     Fortran Usage:
884 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
885 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
886           PetscInt indices[nproc][numprocmax],ierr)
887         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
888         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
889 
890 
891 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
892           ISLocalToGlobalMappingRestoreInfo()
893 @*/
894 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
895 {
896   PetscErrorCode ierr;
897 
898   PetscFunctionBegin;
899   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
900   if (mapping->info_cached) {
901     *nproc    = mapping->info_nproc;
902     *procs    = mapping->info_procs;
903     *numprocs = mapping->info_numprocs;
904     *indices  = mapping->info_indices;
905   } else {
906     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
907   }
908   PetscFunctionReturn(0);
909 }
910 
/*
   Computes the neighbor information for ISLocalToGlobalMappingGetBlockInfo():
   which ranks share (block) indices with this rank, and which local indices
   are shared with each of them. Uses a rendezvous scheme: the global index
   space is partitioned into contiguous ranges of length "scale", each range
   "globally owned" by one rank for the duration of this routine; local
   indices are sent to their global owners, which detect sharing and send the
   owner lists back. The result is cached on the mapping for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; the single-entry arrays keep the
       Restore/free logic uniform */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = one more than the largest global index anywhere (size of the global index space) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank globally owns the contiguous range [rstart,rstart+ng); the last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  /* each entry is a (global index, local index) pair */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was consumed by the fill above; rebuild it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      /* bprocs maps a rank to its slot in the compressed neighbor list */
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1320 
1321 /*@C
1322     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1323 
1324     Collective on ISLocalToGlobalMapping
1325 
1326     Input Parameters:
1327 .   mapping - the mapping from local to global indexing
1328 
    Output Parameters:
1330 +   nproc - number of processors that are connected to this one
1331 .   proc - neighboring processors
1332 .   numproc - number of indices for each processor
1333 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1334 
1335     Level: advanced
1336 
1337 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1338           ISLocalToGlobalMappingGetInfo()
1339 @*/
1340 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1341 {
1342   PetscErrorCode ierr;
1343 
1344   PetscFunctionBegin;
1345   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1346   if (mapping->info_free) {
1347     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1348     if (*indices) {
1349       PetscInt i;
1350 
1351       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1352       for (i=1; i<*nproc; i++) {
1353         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1354       }
1355       ierr = PetscFree(*indices);CHKERRQ(ierr);
1356     }
1357   }
1358   *nproc    = 0;
1359   *procs    = NULL;
1360   *numprocs = NULL;
1361   *indices  = NULL;
1362   PetscFunctionReturn(0);
1363 }
1364 
1365 /*@C
1366     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1367      each index shared by more than one processor
1368 
1369     Collective on ISLocalToGlobalMapping
1370 
1371     Input Parameters:
1372 .   mapping - the mapping from local to global indexing
1373 
    Output Parameters:
1375 +   nproc - number of processors that are connected to this one
1376 .   proc - neighboring processors
1377 .   numproc - number of indices for each subdomain (processor)
1378 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1379 
1380     Level: advanced
1381 
1382     Concepts: mapping^local to global
1383 
1384     Fortran Usage:
1385 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1386 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1387           PetscInt indices[nproc][numprocmax],ierr)
1388         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1389         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1390 
1391 
1392 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1393           ISLocalToGlobalMappingRestoreInfo()
1394 @*/
1395 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1396 {
1397   PetscErrorCode ierr;
1398   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1399 
1400   PetscFunctionBegin;
1401   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1402   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1403   if (bs > 1) { /* we need to expand the cached info */
1404     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1405     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1406     for (i=0; i<*nproc; i++) {
1407       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1408       for (j=0; j<bnumprocs[i]; j++) {
1409         for (k=0; k<bs; k++) {
1410           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1411         }
1412       }
1413       (*numprocs)[i] = bnumprocs[i]*bs;
1414     }
1415     mapping->info_free = PETSC_TRUE;
1416   } else {
1417     *numprocs = bnumprocs;
1418     *indices  = bindices;
1419   }
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 /*@C
1424     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1425 
1426     Collective on ISLocalToGlobalMapping
1427 
1428     Input Parameters:
1429 .   mapping - the mapping from local to global indexing
1430 
    Output Parameters:
1432 +   nproc - number of processors that are connected to this one
1433 .   proc - neighboring processors
1434 .   numproc - number of indices for each processor
1435 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1436 
1437     Level: advanced
1438 
1439 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1440           ISLocalToGlobalMappingGetInfo()
1441 @*/
1442 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1443 {
1444   PetscErrorCode ierr;
1445 
1446   PetscFunctionBegin;
1447   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1448   PetscFunctionReturn(0);
1449 }
1450 
1451 /*@C
1452    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1453 
1454    Not Collective
1455 
1456    Input Arguments:
1457 . ltog - local to global mapping
1458 
1459    Output Arguments:
1460 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1461 
1462    Level: advanced
1463 
1464    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1465 
1466 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1467 @*/
1468 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1469 {
1470   PetscFunctionBegin;
1471   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1472   PetscValidPointer(array,2);
1473   if (ltog->bs == 1) {
1474     *array = ltog->indices;
1475   } else {
1476     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1477     const PetscInt *ii;
1478     PetscErrorCode ierr;
1479 
1480     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1481     *array = jj;
1482     k    = 0;
1483     ii   = ltog->indices;
1484     for (i=0; i<n; i++)
1485       for (j=0; j<bs; j++)
1486         jj[k++] = bs*ii[i] + j;
1487   }
1488   PetscFunctionReturn(0);
1489 }
1490 
1491 /*@C
1492    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1493 
1494    Not Collective
1495 
1496    Input Arguments:
1497 + ltog - local to global mapping
1498 - array - array of indices
1499 
1500    Level: advanced
1501 
1502 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1503 @*/
1504 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1505 {
1506   PetscFunctionBegin;
1507   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1508   PetscValidPointer(array,2);
1509   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1510 
1511   if (ltog->bs > 1) {
1512     PetscErrorCode ierr;
1513     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1514   }
1515   PetscFunctionReturn(0);
1516 }
1517 
1518 /*@C
1519    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1520 
1521    Not Collective
1522 
1523    Input Arguments:
1524 . ltog - local to global mapping
1525 
1526    Output Arguments:
1527 . array - array of indices
1528 
1529    Level: advanced
1530 
1531 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1532 @*/
1533 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1534 {
1535   PetscFunctionBegin;
1536   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1537   PetscValidPointer(array,2);
1538   *array = ltog->indices;
1539   PetscFunctionReturn(0);
1540 }
1541 
1542 /*@C
1543    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1544 
1545    Not Collective
1546 
1547    Input Arguments:
1548 + ltog - local to global mapping
1549 - array - array of indices
1550 
1551    Level: advanced
1552 
1553 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1554 @*/
1555 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1556 {
1557   PetscFunctionBegin;
1558   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1559   PetscValidPointer(array,2);
1560   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1561   *array = NULL;
1562   PetscFunctionReturn(0);
1563 }
1564 
1565 /*@C
1566    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1567 
1568    Not Collective
1569 
1570    Input Arguments:
1571 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1572 . n - number of mappings to concatenate
1573 - ltogs - local to global mappings
1574 
1575    Output Arguments:
1576 . ltogcat - new mapping
1577 
1578    Note: this currently always returns a mapping with block size of 1
1579 
1580    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1581 
1582    Level: advanced
1583 
1584 .seealso: ISLocalToGlobalMappingCreate()
1585 @*/
1586 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1587 {
1588   PetscInt       i,cnt,m,*idx;
1589   PetscErrorCode ierr;
1590 
1591   PetscFunctionBegin;
1592   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1593   if (n > 0) PetscValidPointer(ltogs,3);
1594   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1595   PetscValidPointer(ltogcat,4);
1596   for (cnt=0,i=0; i<n; i++) {
1597     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1598     cnt += m;
1599   }
1600   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1601   for (cnt=0,i=0; i<n; i++) {
1602     const PetscInt *subidx;
1603     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1604     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1605     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1606     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1607     cnt += m;
1608   }
1609   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1610   PetscFunctionReturn(0);
1611 }
1612 
1613 /*MC
1614       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1615                                     used this is good for only small and moderate size problems.
1616 
1617    Options Database Keys:
.   -islocaltoglobalmapping_type basic - select this method
1619 
1620    Level: beginner
1621 
1622 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1623 M*/
1624 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1625 {
1626   PetscFunctionBegin;
1627   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1628   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1629   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1630   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1631   PetscFunctionReturn(0);
1632 }
1633 
1634 /*MC
1635       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1636                                     used this is good for large memory problems.
1637 
1638    Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method
1640 
1641 
1642    Notes: This is selected automatically for large problems if the user does not set the type.
1643 
1644    Level: beginner
1645 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1647 M*/
1648 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1649 {
1650   PetscFunctionBegin;
1651   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1652   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1653   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1654   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1655   PetscFunctionReturn(0);
1656 }
1657 
1658 
1659 /*@C
1660     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1661 
1662    Not Collective
1663 
1664    Input Parameters:
1665 +  sname - name of a new method
-  function - routine to create method context
1667 
1668    Notes:
1669    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1670 
1671    Sample usage:
1672 .vb
1673    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1674 .ve
1675 
1676    Then, your mapping can be chosen with the procedural interface via
1677 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1678    or at runtime via the option
1679 $     -islocaltoglobalmapping_type my_mapper
1680 
1681    Level: advanced
1682 
1683 .keywords: ISLocalToGlobalMappingType, register
1684 
1685 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1686 
1687 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* record the (name, constructor) pair in the global type registry consulted by ISLocalToGlobalMappingSetType() */
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1696 
1697 /*@C
1698    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1699 
1700    Logically Collective on ISLocalToGlobalMapping
1701 
1702    Input Parameters:
1703 +  ltog - the ISLocalToGlobalMapping object
1704 -  type - a known method
1705 
1706    Options Database Key:
1707 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1708     of available methods (for instance, basic or hash)
1709 
1710    Notes:
1711    See "petsc/include/petscis.h" for available methods
1712 
1713   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1714   then set the ISLocalToGlobalMapping type from the options database rather than by using
1715   this routine.
1716 
1717   Level: intermediate
1718 
1719   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1720   are accessed by ISLocalToGlobalMappingSetType().
1721 
1722 .keywords: ISLocalToGlobalMapping, set, method
1723 
1724 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1725 
1726 @*/
1727 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1728 {
1729   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1730   PetscBool      match;
1731 
1732   PetscFunctionBegin;
1733   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1734   PetscValidCharPointer(type,2);
1735 
1736   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1737   if (match) PetscFunctionReturn(0);
1738 
1739   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1740   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1741   /* Destroy the previous private LTOG context */
1742   if (ltog->ops->destroy) {
1743     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1744     ltog->ops->destroy = NULL;
1745   }
1746   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1747   ierr = (*r)(ltog);CHKERRQ(ierr);
1748   PetscFunctionReturn(0);
1749 }
1750 
/* guards ISLocalToGlobalMappingRegisterAll() so the built-in types are registered only once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1752 
1753 /*@C
1754   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1755 
1756   Not Collective
1757 
1758   Level: advanced
1759 
1760 .keywords: IS, register, all
.seealso:  ISRegister(), ISLocalToGlobalMappingRegister()
1762 @*/
1763 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1764 {
1765   PetscErrorCode ierr;
1766 
1767   PetscFunctionBegin;
1768   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1769   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1770 
1771   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
1772   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
1773   PetscFunctionReturn(0);
1774 }
1775 
1776