xref: /petsc/src/vec/is/utils/isltog.c (revision 1dbfe718a013dc69d13bcaeb9539111b66e2efb7)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hash.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Implementation data for ISLOCALTOGLOBALMAPPINGBASIC: a dense lookup table
   covering the whole global index range [globalstart, globalend] */
typedef struct {
  PetscInt    *globals;   /* globals[g - globalstart] = local index of global (block) index g, or -1 if absent */
} ISLocalToGlobalMapping_Basic;

/* Implementation data for ISLOCALTOGLOBALMAPPINGHASH: hash table mapping a
   global (block) index to its local index; memory scales with the number of
   local indices rather than with the global range */
typedef struct {
  PetscHashI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 
19 /* -----------------------------------------------------------------------------------------*/
20 
21 /*
22     Creates the global mapping information in the ISLocalToGlobalMapping structure
23 
24     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
25 */
26 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
27 {
28   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   if (mapping->data) PetscFunctionReturn(0);
33   end   = 0;
34   start = PETSC_MAX_INT;
35 
36   for (i=0; i<n; i++) {
37     if (idx[i] < 0) continue;
38     if (idx[i] < start) start = idx[i];
39     if (idx[i] > end)   end   = idx[i];
40   }
41   if (start > end) {start = 0; end = -1;}
42   mapping->globalstart = start;
43   mapping->globalend   = end;
44   if (!((PetscObject)mapping)->type_name) {
45     if ((end - start) > PetscMax(4*n,1000000)) {
46       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
47     } else {
48       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
49     }
50   }
51   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
52   PetscFunctionReturn(0);
53 }
54 
55 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
56 {
57   PetscErrorCode              ierr;
58   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
59   ISLocalToGlobalMapping_Basic *map;
60 
61   PetscFunctionBegin;
62   start            = mapping->globalstart;
63   end              = mapping->globalend;
64   ierr             = PetscNew(&map);CHKERRQ(ierr);
65   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
66   map->globals     = globals;
67   for (i=0; i<end-start+1; i++) globals[i] = -1;
68   for (i=0; i<n; i++) {
69     if (idx[i] < 0) continue;
70     globals[idx[i] - start] = i;
71   }
72   mapping->data = (void*)map;
73   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
74   PetscFunctionReturn(0);
75 }
76 
77 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
78 {
79   PetscErrorCode              ierr;
80   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
81   ISLocalToGlobalMapping_Hash *map;
82 
83   PetscFunctionBegin;
84   ierr = PetscNew(&map);CHKERRQ(ierr);
85   PetscHashICreate(map->globalht);
86   for (i=0; i<n; i++ ) {
87     if (idx[i] < 0) continue;
88     PetscHashIAdd(map->globalht, idx[i], i);
89   }
90   mapping->data = (void*)map;
91   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
92   PetscFunctionReturn(0);
93 }
94 
95 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
96 {
97   PetscErrorCode              ierr;
98   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
99 
100   PetscFunctionBegin;
101   if (!map) PetscFunctionReturn(0);
102   ierr = PetscFree(map->globals);CHKERRQ(ierr);
103   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
104   PetscFunctionReturn(0);
105 }
106 
107 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
108 {
109   PetscErrorCode              ierr;
110   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
111 
112   PetscFunctionBegin;
113   if (!map) PetscFunctionReturn(0);
114   PetscHashIDestroy(map->globalht);
115   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
116   PetscFunctionReturn(0);
117 }
118 
/*
   Instantiate the four ISGlobalToLocalMappingApply* implementations by
   repeatedly including the template header isltog.h.  Before each inclusion
   the GTOL* macros configure the generated code:
     GTOLTYPE          - which data struct mapping->data is cast to (_Basic or _Hash)
     GTOLNAME          - suffix of the generated function name
     GTOLSTART, GTOLEND- expressions for the valid global index range (presumably
                         used for range checks inside isltog.h — confirm there)
     GTOL(g, local)    - how one global index g is translated to a local index
   The non-Block variants operate on expanded (point) indices, hence the g/bs
   and % bs arithmetic; the Block variants operate directly on block indices.
   NOTE(review): the macros are redefined before every include without #undef
   here, so isltog.h presumably #undefs them after use — confirm in isltog.h.
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLSTART bs*start
#define GTOLEND bs*(end+1)-1
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLSTART start
#define GTOLEND end
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLSTART bs*start
#define GTOLEND bs*(end+1)-1
#define GTOL(g, local) do {                  \
    PetscHashIMap(map->globalht,g/bs,local); \
    local = bs*local + (g % bs);             \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLSTART start
#define GTOLEND end
#define GTOL(g, local) do {                  \
    PetscHashIMap(map->globalht,g,local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
156 
157 /*@
158     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
159 
160     Not Collective
161 
162     Input Parameter:
163 .   ltog - local to global mapping
164 
165     Output Parameter:
166 .   nltog - the duplicated local to global mapping
167 
168     Level: advanced
169 
170     Concepts: mapping^local to global
171 
172 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
173 @*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* deep-copies the block indices; the new mapping starts with the default type —
     the source's type and any cached global-to-local data are not carried over */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
183 
184 /*@
185     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
186 
187     Not Collective
188 
189     Input Parameter:
190 .   ltog - local to global mapping
191 
192     Output Parameter:
193 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
194 
195     Level: advanced
196 
197     Concepts: mapping^local to global
198 
199 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
200 @*/
201 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
202 {
203   PetscFunctionBegin;
204   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
205   PetscValidIntPointer(n,2);
206   *n = mapping->bs*mapping->n;
207   PetscFunctionReturn(0);
208 }
209 
210 /*@C
211     ISLocalToGlobalMappingView - View a local to global mapping
212 
213     Not Collective
214 
215     Input Parameters:
216 +   ltog - local to global mapping
217 -   viewer - viewer
218 
219     Level: advanced
220 
221     Concepts: mapping^local to global
222 
223 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
224 @*/
225 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
226 {
227   PetscInt       i;
228   PetscMPIInt    rank;
229   PetscBool      iascii;
230   PetscErrorCode ierr;
231 
232   PetscFunctionBegin;
233   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
234   if (!viewer) {
235     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
236   }
237   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
238 
239   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
240   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
241   if (iascii) {
242     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
243     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
244     for (i=0; i<mapping->n; i++) {
245       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
246     }
247     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
248     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
249   }
250   PetscFunctionReturn(0);
251 }
252 
253 /*@
254     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
255     ordering and a global parallel ordering.
256 
257     Not collective
258 
259     Input Parameter:
260 .   is - index set containing the global numbers for each local number
261 
262     Output Parameter:
263 .   mapping - new mapping data structure
264 
265     Notes: the block size of the IS determines the block size of the mapping
266     Level: advanced
267 
268     Concepts: mapping^local to global
269 
270 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
271 @*/
272 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
273 {
274   PetscErrorCode ierr;
275   PetscInt       n,bs;
276   const PetscInt *indices;
277   MPI_Comm       comm;
278   PetscBool      isblock;
279 
280   PetscFunctionBegin;
281   PetscValidHeaderSpecific(is,IS_CLASSID,1);
282   PetscValidPointer(mapping,2);
283 
284   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
285   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
286   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
287   if (!isblock) {
288     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
289     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
290     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
291   } else {
292     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
293     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
294     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
295     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
296   }
297   PetscFunctionReturn(0);
298 }
299 
300 /*@C
301     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
302     ordering and a global parallel ordering.
303 
304     Collective
305 
306     Input Parameter:
307 +   sf - star forest mapping contiguous local indices to (rank, offset)
308 -   start - first global index on this process
309 
310     Output Parameter:
311 .   mapping - new mapping data structure
312 
313     Level: advanced
314 
315     Concepts: mapping^local to global
316 
317 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
318 @*/
319 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
320 {
321   PetscErrorCode ierr;
322   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
323   const PetscInt *ilocal;
324   MPI_Comm       comm;
325 
326   PetscFunctionBegin;
327   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
328   PetscValidPointer(mapping,3);
329 
330   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
331   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
332   if (ilocal) {
333     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
334   }
335   else maxlocal = nleaves;
336   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
337   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
338   for (i=0; i<nroots; i++) globals[i] = start + i;
339   for (i=0; i<maxlocal; i++) ltog[i] = -1;
340   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
341   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
342   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
343   ierr = PetscFree(globals);CHKERRQ(ierr);
344   PetscFunctionReturn(0);
345 }
346 
347 /*@
348     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
349 
350     Not collective
351 
352     Input Parameters:
353 .   mapping - mapping data structure
354 .   bs - the blocksize
355 
356     Level: advanced
357 
358     Concepts: mapping^local to global
359 
360 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
361 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;                /* old number of blocks */
  obs = mapping->bs;               /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;               /* new number of blocks: total entries must divide evenly */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* obtain the expanded (non-block) indices so they can be regrouped into blocks of size bs */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* each new block must be bs consecutive global indices, or entirely negative */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }   /* count negative ("missing") entries */
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;                   /* the loop above stops one short; test the last entry too */
    if (cn) {
      /* a block may be wholly negative (maps to -1) but never partially negative */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;                     /* store the block index, not the expanded index */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  /* drop any cached global-to-local data built for the old block size */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
408 
409 /*@
410     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
411     ordering and a global parallel ordering.
412 
413     Not Collective
414 
415     Input Parameters:
416 .   mapping - mapping data structure
417 
418     Output Parameter:
419 .   bs - the blocksize
420 
421     Level: advanced
422 
423     Concepts: mapping^local to global
424 
425 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
426 @*/
427 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
428 {
429   PetscFunctionBegin;
430   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
431   *bs = mapping->bs;
432   PetscFunctionReturn(0);
433 }
434 
435 /*@
436     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
437     ordering and a global parallel ordering.
438 
439     Not Collective, but communicator may have more than one process
440 
441     Input Parameters:
442 +   comm - MPI communicator
443 .   bs - the block size
444 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
445 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
446 -   mode - see PetscCopyMode
447 
448     Output Parameter:
449 .   mapping - new mapping data structure
450 
451     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
452 
    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
454     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
455     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
456 
457     Level: advanced
458 
459     Concepts: mapping^local to global
460 
461 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
462           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
463 @*/
464 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
465 {
466   PetscErrorCode ierr;
467   PetscInt       *in;
468 
469   PetscFunctionBegin;
470   if (n) PetscValidIntPointer(indices,3);
471   PetscValidPointer(mapping,4);
472 
473   *mapping = NULL;
474   ierr = ISInitializePackage();CHKERRQ(ierr);
475 
476   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
477                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
478   (*mapping)->n             = n;
479   (*mapping)->bs            = bs;
480   (*mapping)->info_cached   = PETSC_FALSE;
481   (*mapping)->info_free     = PETSC_FALSE;
482   (*mapping)->info_procs    = NULL;
483   (*mapping)->info_numprocs = NULL;
484   (*mapping)->info_indices  = NULL;
485 
486   (*mapping)->ops->globaltolocalmappingapply      = NULL;
487   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
488   (*mapping)->ops->destroy                        = NULL;
489   if (mode == PETSC_COPY_VALUES) {
490     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
491     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
492     (*mapping)->indices = in;
493     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
494   } else if (mode == PETSC_OWN_POINTER) {
495     (*mapping)->indices = (PetscInt*)indices;
496     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
497   }
498   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
499   PetscFunctionReturn(0);
500 }
501 
502 PetscFunctionList ISLocalToGlobalMappingList = NULL;
503 
504 
505 /*@
506    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
507 
508    Not collective
509 
510    Input Parameters:
511 .  mapping - mapping data structure
512 
513    Level: advanced
514 
515 @*/
516 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
517 {
518   PetscErrorCode             ierr;
519   char                       type[256];
520   ISLocalToGlobalMappingType defaulttype = "Not set";
521   PetscBool                  flg;
522 
523   PetscFunctionBegin;
524   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
525   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
526   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
527   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
528   if (flg) {
529     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
530   }
531   ierr = PetscOptionsEnd();CHKERRQ(ierr);
532   PetscFunctionReturn(0);
533 }
534 
535 /*@
536    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
537    ordering and a global parallel ordering.
538 
   Not Collective
540 
541    Input Parameters:
542 .  mapping - mapping data structure
543 
544    Level: advanced
545 
546 .seealso: ISLocalToGlobalMappingCreate()
547 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the final reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* free each per-process index array (entry 0 is handled before the loop;
       every entry 0..info_nproc-1 is freed exactly once), then the pointer array */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* let the active implementation (basic/hash) release its global-to-local data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
575 
576 /*@
577     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
578     a new index set using the global numbering defined in an ISLocalToGlobalMapping
579     context.
580 
581     Not collective
582 
583     Input Parameters:
584 +   mapping - mapping between local and global numbering
585 -   is - index set in local numbering
586 
587     Output Parameters:
588 .   newis - index set in global numbering
589 
590     Level: advanced
591 
592     Concepts: mapping^local to global
593 
594 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
595           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
596 @*/
597 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
598 {
599   PetscErrorCode ierr;
600   PetscInt       n,*idxout;
601   const PetscInt *idxin;
602 
603   PetscFunctionBegin;
604   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
605   PetscValidHeaderSpecific(is,IS_CLASSID,2);
606   PetscValidPointer(newis,3);
607 
608   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
609   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
610   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
611   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
612   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
613   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
614   PetscFunctionReturn(0);
615 }
616 
617 /*@
618    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
619    and converts them to the global numbering.
620 
621    Not collective
622 
623    Input Parameters:
624 +  mapping - the local to global mapping context
625 .  N - number of integers
626 -  in - input indices in local numbering
627 
628    Output Parameter:
629 .  out - indices in global numbering
630 
631    Notes:
632    The in and out array parameters may be identical.
633 
634    Level: advanced
635 
636 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
637           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
638           AOPetscToApplication(), ISGlobalToLocalMappingApply()
639 
640     Concepts: mapping^local to global
641 @*/
642 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
643 {
644   PetscInt i,bs,Nmax;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
648   bs   = mapping->bs;
649   Nmax = bs*mapping->n;
650   if (bs == 1) {
651     const PetscInt *idx = mapping->indices;
652     for (i=0; i<N; i++) {
653       if (in[i] < 0) {
654         out[i] = in[i];
655         continue;
656       }
657       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
658       out[i] = idx[in[i]];
659     }
660   } else {
661     const PetscInt *idx = mapping->indices;
662     for (i=0; i<N; i++) {
663       if (in[i] < 0) {
664         out[i] = in[i];
665         continue;
666       }
667       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
668       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
669     }
670   }
671   PetscFunctionReturn(0);
672 }
673 
674 /*@
675    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
676 
677    Not collective
678 
679    Input Parameters:
680 +  mapping - the local to global mapping context
681 .  N - number of integers
682 -  in - input indices in local block numbering
683 
684    Output Parameter:
685 .  out - indices in global block numbering
686 
687    Notes:
688    The in and out array parameters may be identical.
689 
690    Example:
691      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
692      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
693 
694    Level: advanced
695 
696 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
697           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
698           AOPetscToApplication(), ISGlobalToLocalMappingApply()
699 
700     Concepts: mapping^local to global
701 @*/
702 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
703 {
704 
705   PetscFunctionBegin;
706   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
707   {
708     PetscInt i,Nmax = mapping->n;
709     const PetscInt *idx = mapping->indices;
710 
711     for (i=0; i<N; i++) {
712       if (in[i] < 0) {
713         out[i] = in[i];
714         continue;
715       }
716       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
717       out[i] = idx[in[i]];
718     }
719   }
720   PetscFunctionReturn(0);
721 }
722 
723 /*@
724     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
725     specified with a global numbering.
726 
727     Not collective
728 
729     Input Parameters:
730 +   mapping - mapping between local and global numbering
731 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
732            IS_GTOLM_DROP - drops the indices with no local value from the output list
733 .   n - number of global indices to map
734 -   idx - global indices to map
735 
736     Output Parameters:
737 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
738 -   idxout - local index of each global index, one must pass in an array long enough
739              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
740              idxout == NULL to determine the required length (returned in nout)
741              and then allocate the required space and call ISGlobalToLocalMappingApply()
742              a second time to set the values.
743 
744     Notes:
745     Either nout or idxout may be NULL. idx and idxout may be identical.
746 
    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
748     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
749     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
750 
751     Level: advanced
752 
753     Developer Note: The manual page states that idx and idxout may be identical but the calling
754        sequence declares idx as const so it cannot be the same as idxout.
755 
756     Concepts: mapping^global to local
757 
758 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
759           ISLocalToGlobalMappingDestroy()
760 @*/
761 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
762 {
763   PetscErrorCode ierr;
764 
765   PetscFunctionBegin;
766   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
767   if (!mapping->data) {
768     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
769   }
770   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
771   PetscFunctionReturn(0);
772 }
773 
774 /*@
775     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
776     a new index set using the local numbering defined in an ISLocalToGlobalMapping
777     context.
778 
779     Not collective
780 
781     Input Parameters:
782 +   mapping - mapping between local and global numbering
783 -   is - index set in global numbering
784 
785     Output Parameters:
786 .   newis - index set in local numbering
787 
788     Level: advanced
789 
790     Concepts: mapping^local to global
791 
792 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
793           ISLocalToGlobalMappingDestroy()
794 @*/
795 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type, IS is,IS *newis)
796 {
797   PetscErrorCode ierr;
798   PetscInt       n,nout,*idxout;
799   const PetscInt *idxin;
800 
801   PetscFunctionBegin;
802   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
803   PetscValidHeaderSpecific(is,IS_CLASSID,3);
804   PetscValidPointer(newis,4);
805 
806   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
807   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
808   if (type == IS_GTOLM_MASK) {
809     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
810   } else {
811     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
812     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
813   }
814   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
815   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
816   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
817   PetscFunctionReturn(0);
818 }
819 
820 /*@
821     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
822     specified with a block global numbering.
823 
824     Not collective
825 
826     Input Parameters:
827 +   mapping - mapping between local and global numbering
828 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
829            IS_GTOLM_DROP - drops the indices with no local value from the output list
830 .   n - number of global indices to map
831 -   idx - global indices to map
832 
833     Output Parameters:
834 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
835 -   idxout - local index of each global index, one must pass in an array long enough
836              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
837              idxout == NULL to determine the required length (returned in nout)
838              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
839              a second time to set the values.
840 
841     Notes:
842     Either nout or idxout may be NULL. idx and idxout may be identical.
843 
    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
845     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
846     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
847 
848     Level: advanced
849 
850     Developer Note: The manual page states that idx and idxout may be identical but the calling
851        sequence declares idx as const so it cannot be the same as idxout.
852 
853     Concepts: mapping^global to local
854 
855 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
856           ISLocalToGlobalMappingDestroy()
857 @*/
858 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
859                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
860 {
861   PetscErrorCode ierr;
862 
863   PetscFunctionBegin;
864   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
865   if (!mapping->data) {
866     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
867   }
868   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
869   PetscFunctionReturn(0);
870 }
871 
872 
873 /*@C
874     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
875      each index shared by more than one processor
876 
877     Collective on ISLocalToGlobalMapping
878 
879     Input Parameters:
880 .   mapping - the mapping from local to global indexing
881 
882     Output Parameter:
883 +   nproc - number of processors that are connected to this one
884 .   proc - neighboring processors
885 .   numproc - number of indices for each subdomain (processor)
886 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
887 
888     Level: advanced
889 
890     Concepts: mapping^local to global
891 
892     Fortran Usage:
893 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
894 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
895           PetscInt indices[nproc][numprocmax],ierr)
896         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
897         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
898 
899 
900 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
901           ISLocalToGlobalMappingRestoreInfo()
902 @*/
903 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
904 {
905   PetscErrorCode ierr;
906 
907   PetscFunctionBegin;
908   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
909   if (mapping->info_cached) {
910     *nproc    = mapping->info_nproc;
911     *procs    = mapping->info_procs;
912     *numprocs = mapping->info_numprocs;
913     *indices  = mapping->info_indices;
914   } else {
915     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
916   }
917   PetscFunctionReturn(0);
918 }
919 
/*
   Computes, by three rounds of point-to-point MPI messages (tags tag1..tag3), for
   each processor: the neighboring processors sharing at least one (block) index
   with it and, per neighbor, the shared local indices sorted by global number.

     Round 1 (tag1): global indices are assigned an "owning" rank by an even split
        of the global range (scale = Ng/size+1); each rank sends its local
        (global index, local position) pairs to the owners.
     Round 2 (tag2/tag3): owners count how many ranks hold each owned index; for
        indices held by more than one rank they send back to every holder the full
        list of sharing ranks (message lengths on tag2, data on tag3).
     Round 3 (local): received lists are converted into the per-neighbor output
        arrays, each list is sorted by global index, this rank's own entry is moved
        to slot 0, and the results are cached in the mapping for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor case: no neighbors; still allocate the one-entry arrays the
       callers (and the cache) expect */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = one more than the largest global index over all ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* even block distribution of [0,Ng): this rank globally owns [rstart,rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = number of our indices rank p owns; nprocs[2*p+1] = 1 iff we message p */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  /* each entry is a (global index, local position) pair, hence the factor of 2 */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop advanced starts[]; rebuild the message offsets for the sends */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  /* nownedsenders[g-rstart] = number of ranks whose subdomain contains owned index g */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;  /* number of (index,position) pairs received */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  /* starts[] gives, per owned index, the offset of its list inside ownedsenders[] */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced by the fill loop above; rebuild before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* rebuild starts[] once more: the fill loop above advanced it again */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* starts3[] = receive offsets for the variable-length data messages; nt = total length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs[] is reused here: nprocs[p] = number of shared indices with rank p */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  /* allocate the output arrays; bprocs[] maps a rank to its slot in the compressed lists */
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1329 
1330 /*@C
1331     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1332 
1333     Collective on ISLocalToGlobalMapping
1334 
1335     Input Parameters:
1336 .   mapping - the mapping from local to global indexing
1337 
1338     Output Parameter:
1339 +   nproc - number of processors that are connected to this one
1340 .   proc - neighboring processors
1341 .   numproc - number of indices for each processor
1342 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1343 
1344     Level: advanced
1345 
1346 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetBlockInfo()
1348 @*/
1349 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1350 {
1351   PetscErrorCode ierr;
1352 
1353   PetscFunctionBegin;
1354   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1355   if (mapping->info_free) {
1356     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1357     if (*indices) {
1358       PetscInt i;
1359 
1360       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1361       for (i=1; i<*nproc; i++) {
1362         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1363       }
1364       ierr = PetscFree(*indices);CHKERRQ(ierr);
1365     }
1366   }
1367   *nproc    = 0;
1368   *procs    = NULL;
1369   *numprocs = NULL;
1370   *indices  = NULL;
1371   PetscFunctionReturn(0);
1372 }
1373 
1374 /*@C
1375     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1376      each index shared by more than one processor
1377 
1378     Collective on ISLocalToGlobalMapping
1379 
1380     Input Parameters:
1381 .   mapping - the mapping from local to global indexing
1382 
1383     Output Parameter:
1384 +   nproc - number of processors that are connected to this one
1385 .   proc - neighboring processors
1386 .   numproc - number of indices for each subdomain (processor)
1387 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1388 
1389     Level: advanced
1390 
1391     Concepts: mapping^local to global
1392 
1393     Fortran Usage:
1394 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1395 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1396           PetscInt indices[nproc][numprocmax],ierr)
1397         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1398         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1399 
1400 
1401 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1402           ISLocalToGlobalMappingRestoreInfo()
1403 @*/
1404 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1405 {
1406   PetscErrorCode ierr;
1407   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1408 
1409   PetscFunctionBegin;
1410   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1411   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1412   if (bs > 1) { /* we need to expand the cached info */
1413     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1414     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1415     for (i=0; i<*nproc; i++) {
1416       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1417       for (j=0; j<bnumprocs[i]; j++) {
1418         for (k=0; k<bs; k++) {
1419           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1420         }
1421       }
1422       (*numprocs)[i] = bnumprocs[i]*bs;
1423     }
1424     mapping->info_free = PETSC_TRUE;
1425   } else {
1426     *numprocs = bnumprocs;
1427     *indices  = bindices;
1428   }
1429   PetscFunctionReturn(0);
1430 }
1431 
1432 /*@C
1433     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1434 
1435     Collective on ISLocalToGlobalMapping
1436 
1437     Input Parameters:
1438 .   mapping - the mapping from local to global indexing
1439 
1440     Output Parameter:
1441 +   nproc - number of processors that are connected to this one
1442 .   proc - neighboring processors
1443 .   numproc - number of indices for each processor
1444 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1445 
1446     Level: advanced
1447 
1448 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1449           ISLocalToGlobalMappingGetInfo()
1450 @*/
1451 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1452 {
1453   PetscErrorCode ierr;
1454 
1455   PetscFunctionBegin;
1456   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1457   PetscFunctionReturn(0);
1458 }
1459 
1460 /*@C
1461    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1462 
1463    Not Collective
1464 
1465    Input Arguments:
1466 . ltog - local to global mapping
1467 
1468    Output Arguments:
1469 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1470 
1471    Level: advanced
1472 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1474 
1475 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1476 @*/
1477 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1478 {
1479   PetscFunctionBegin;
1480   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1481   PetscValidPointer(array,2);
1482   if (ltog->bs == 1) {
1483     *array = ltog->indices;
1484   } else {
1485     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1486     const PetscInt *ii;
1487     PetscErrorCode ierr;
1488 
1489     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1490     *array = jj;
1491     k    = 0;
1492     ii   = ltog->indices;
1493     for (i=0; i<n; i++)
1494       for (j=0; j<bs; j++)
1495         jj[k++] = bs*ii[i] + j;
1496   }
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 /*@C
1501    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1502 
1503    Not Collective
1504 
1505    Input Arguments:
1506 + ltog - local to global mapping
1507 - array - array of indices
1508 
1509    Level: advanced
1510 
1511 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1512 @*/
1513 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1514 {
1515   PetscFunctionBegin;
1516   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1517   PetscValidPointer(array,2);
1518   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1519 
1520   if (ltog->bs > 1) {
1521     PetscErrorCode ierr;
1522     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1523   }
1524   PetscFunctionReturn(0);
1525 }
1526 
1527 /*@C
1528    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1529 
1530    Not Collective
1531 
1532    Input Arguments:
1533 . ltog - local to global mapping
1534 
1535    Output Arguments:
1536 . array - array of indices
1537 
1538    Level: advanced
1539 
1540 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1541 @*/
1542 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1543 {
1544   PetscFunctionBegin;
1545   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1546   PetscValidPointer(array,2);
1547   *array = ltog->indices;
1548   PetscFunctionReturn(0);
1549 }
1550 
1551 /*@C
1552    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1553 
1554    Not Collective
1555 
1556    Input Arguments:
1557 + ltog - local to global mapping
1558 - array - array of indices
1559 
1560    Level: advanced
1561 
1562 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1563 @*/
1564 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1565 {
1566   PetscFunctionBegin;
1567   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1568   PetscValidPointer(array,2);
1569   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1570   *array = NULL;
1571   PetscFunctionReturn(0);
1572 }
1573 
1574 /*@C
1575    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1576 
1577    Not Collective
1578 
1579    Input Arguments:
1580 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1581 . n - number of mappings to concatenate
1582 - ltogs - local to global mappings
1583 
1584    Output Arguments:
1585 . ltogcat - new mapping
1586 
1587    Note: this currently always returns a mapping with block size of 1
1588 
1589    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1590 
1591    Level: advanced
1592 
1593 .seealso: ISLocalToGlobalMappingCreate()
1594 @*/
1595 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1596 {
1597   PetscInt       i,cnt,m,*idx;
1598   PetscErrorCode ierr;
1599 
1600   PetscFunctionBegin;
1601   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1602   if (n > 0) PetscValidPointer(ltogs,3);
1603   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1604   PetscValidPointer(ltogcat,4);
1605   for (cnt=0,i=0; i<n; i++) {
1606     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1607     cnt += m;
1608   }
1609   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1610   for (cnt=0,i=0; i<n; i++) {
1611     const PetscInt *subidx;
1612     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1613     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1614     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1615     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1616     cnt += m;
1617   }
1618   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1619   PetscFunctionReturn(0);
1620 }
1621 
1622 /*MC
1623       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1624                                     used this is good for only small and moderate size problems.
1625 
1626    Options Database Keys:
.   -islocaltoglobalmapping_type basic - select this method
1628 
1629    Level: beginner
1630 
1631 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1632 M*/
1633 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1634 {
1635   PetscFunctionBegin;
1636   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1637   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1638   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1639   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1640   PetscFunctionReturn(0);
1641 }
1642 
1643 /*MC
1644       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1645                                     used this is good for large memory problems.
1646 
1647    Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method
1649 
1650 
1651    Notes: This is selected automatically for large problems if the user does not set the type.
1652 
1653    Level: beginner
1654 
.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
1656 M*/
1657 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1658 {
1659   PetscFunctionBegin;
1660   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1661   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1662   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1663   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1664   PetscFunctionReturn(0);
1665 }
1666 
1667 
1668 /*@C
1669     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1670 
1671    Not Collective
1672 
1673    Input Parameters:
1674 +  sname - name of a new method
-  function - routine to create method context
1676 
1677    Notes:
1678    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1679 
1680    Sample usage:
1681 .vb
1682    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1683 .ve
1684 
1685    Then, your mapping can be chosen with the procedural interface via
1686 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1687    or at runtime via the option
1688 $     -islocaltoglobalmapping_type my_mapper
1689 
1690    Level: advanced
1691 
1692 .keywords: ISLocalToGlobalMappingType, register
1693 
1694 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1695 
1696 @*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* add the creation routine under sname to the global list that
     ISLocalToGlobalMappingSetType() searches */
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1705 
1706 /*@C
1707    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1708 
1709    Logically Collective on ISLocalToGlobalMapping
1710 
1711    Input Parameters:
1712 +  ltog - the ISLocalToGlobalMapping object
1713 -  type - a known method
1714 
1715    Options Database Key:
1716 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1717     of available methods (for instance, basic or hash)
1718 
1719    Notes:
1720    See "petsc/include/petscis.h" for available methods
1721 
1722   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1723   then set the ISLocalToGlobalMapping type from the options database rather than by using
1724   this routine.
1725 
1726   Level: intermediate
1727 
1728   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1729   are accessed by ISLocalToGlobalMappingSetType().
1730 
1731 .keywords: ISLocalToGlobalMapping, set, method
1732 
1733 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1734 
1735 @*/
1736 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1737 {
1738   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1739   PetscBool      match;
1740 
1741   PetscFunctionBegin;
1742   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1743   PetscValidCharPointer(type,2);
1744 
1745   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1746   if (match) PetscFunctionReturn(0);
1747 
1748   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1749   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1750   /* Destroy the previous private LTOG context */
1751   if (ltog->ops->destroy) {
1752     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1753     ltog->ops->destroy = NULL;
1754   }
1755   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1756   ierr = (*r)(ltog);CHKERRQ(ierr);
1757   PetscFunctionReturn(0);
1758 }
1759 
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE; /* set by ISLocalToGlobalMappingRegisterAll() so default registration runs only once */
1761 
1762 /*@C
1763   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1764 
1765   Not Collective
1766 
1767   Level: advanced
1768 
1769 .keywords: IS, register, all
.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
1771 @*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* one-time guard: all built-in implementations are registered on the first call only */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1784 
1785