xref: /petsc/src/vec/is/utils/isltog.c (revision 487a658c8b32ba712a1dc8280daad2fd70c1dcd9)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
/* Class id for ISLocalToGlobalMapping objects; registered by ISInitializePackage() */
PetscClassId IS_LTOGM_CLASSID;
/* Forward declaration: builds/returns the cached block neighbor info (defined later in this file) */
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Data for the "basic" global-to-local strategy: a dense lookup table over the
   global index range [globalstart,globalend]; fast but memory scales with the range. */
typedef struct {
  PetscInt *globals;   /* globals[g - globalstart] = local index for global index g, or -1 if unmapped */
} ISLocalToGlobalMapping_Basic;

/* Data for the "hash" global-to-local strategy: memory scales with the number of
   local indices, so it remains usable when the global range is huge. */
typedef struct {
  PetscHMapI globalht; /* hash map: global index -> local index */
} ISLocalToGlobalMapping_Hash;
17 
18 
19 /* -----------------------------------------------------------------------------------------*/
20 
21 /*
22     Creates the global mapping information in the ISLocalToGlobalMapping structure
23 
24     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
25 */
26 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
27 {
28   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   if (mapping->data) PetscFunctionReturn(0);
33   end   = 0;
34   start = PETSC_MAX_INT;
35 
36   for (i=0; i<n; i++) {
37     if (idx[i] < 0) continue;
38     if (idx[i] < start) start = idx[i];
39     if (idx[i] > end)   end   = idx[i];
40   }
41   if (start > end) {start = 0; end = -1;}
42   mapping->globalstart = start;
43   mapping->globalend   = end;
44   if (!((PetscObject)mapping)->type_name) {
45     if ((end - start) > PetscMax(4*n,1000000)) {
46       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
47     } else {
48       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
49     }
50   }
51   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
52   PetscFunctionReturn(0);
53 }
54 
55 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
56 {
57   PetscErrorCode              ierr;
58   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
59   ISLocalToGlobalMapping_Basic *map;
60 
61   PetscFunctionBegin;
62   start            = mapping->globalstart;
63   end              = mapping->globalend;
64   ierr             = PetscNew(&map);CHKERRQ(ierr);
65   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
66   map->globals     = globals;
67   for (i=0; i<end-start+1; i++) globals[i] = -1;
68   for (i=0; i<n; i++) {
69     if (idx[i] < 0) continue;
70     globals[idx[i] - start] = i;
71   }
72   mapping->data = (void*)map;
73   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
74   PetscFunctionReturn(0);
75 }
76 
77 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
78 {
79   PetscErrorCode              ierr;
80   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
81   ISLocalToGlobalMapping_Hash *map;
82 
83   PetscFunctionBegin;
84   ierr = PetscNew(&map);CHKERRQ(ierr);
85   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
86   for (i=0; i<n; i++ ) {
87     if (idx[i] < 0) continue;
88     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
89   }
90   mapping->data = (void*)map;
91   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
92   PetscFunctionReturn(0);
93 }
94 
95 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
96 {
97   PetscErrorCode              ierr;
98   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
99 
100   PetscFunctionBegin;
101   if (!map) PetscFunctionReturn(0);
102   ierr = PetscFree(map->globals);CHKERRQ(ierr);
103   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
104   PetscFunctionReturn(0);
105 }
106 
107 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
108 {
109   PetscErrorCode              ierr;
110   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
111 
112   PetscFunctionBegin;
113   if (!map) PetscFunctionReturn(0);
114   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
115   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
116   PetscFunctionReturn(0);
117 }
118 
/*
   The generic bodies of the ISGlobalToLocalMappingApply{,Block}_{Basic,Hash}
   implementations live in isltog.h; each inclusion below instantiates one
   variant (an X-macro pattern).  Before each inclusion we define:
     GTOLTYPE - suffix of the data struct, ISLocalToGlobalMapping##GTOLTYPE
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size used to scale indices (mapping->bs, or 1 for the Block_* variants)
     GTOL     - statement translating one global index g into a local index
   NOTE(review): the macros are redefined before every inclusion without an
   explicit #undef here, so isltog.h presumably #undefs them after use -- verify there.
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* block (bs = 1) variant of the table-based lookup */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* point variant of the hash-based lookup */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    local = bs*local + (g % bs);                    \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* block (bs = 1) variant of the hash-based lookup */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
153 
/*@
    ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   nltog - the duplicated local to global mapping

    Notes:
    The duplicate is created with PETSC_COPY_VALUES, so it owns an independent
    copy of the block indices; the type (basic/hash) of the original is not copied.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
180 
181 /*@
182     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
183 
184     Not Collective
185 
186     Input Parameter:
187 .   ltog - local to global mapping
188 
189     Output Parameter:
190 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
191 
192     Level: advanced
193 
194     Concepts: mapping^local to global
195 
196 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
197 @*/
198 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
199 {
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
202   PetscValidIntPointer(n,2);
203   *n = mapping->bs*mapping->n;
204   PetscFunctionReturn(0);
205 }
206 
207 /*@C
208     ISLocalToGlobalMappingView - View a local to global mapping
209 
210     Not Collective
211 
212     Input Parameters:
213 +   ltog - local to global mapping
214 -   viewer - viewer
215 
216     Level: advanced
217 
218     Concepts: mapping^local to global
219 
220 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
221 @*/
222 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
223 {
224   PetscInt       i;
225   PetscMPIInt    rank;
226   PetscBool      iascii;
227   PetscErrorCode ierr;
228 
229   PetscFunctionBegin;
230   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
231   if (!viewer) {
232     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
233   }
234   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
235 
236   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
237   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
238   if (iascii) {
239     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
240     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
241     for (i=0; i<mapping->n; i++) {
242       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
243     }
244     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
245     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
246   }
247   PetscFunctionReturn(0);
248 }
249 
250 /*@
251     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
252     ordering and a global parallel ordering.
253 
254     Not collective
255 
256     Input Parameter:
257 .   is - index set containing the global numbers for each local number
258 
259     Output Parameter:
260 .   mapping - new mapping data structure
261 
262     Notes:
263     the block size of the IS determines the block size of the mapping
264     Level: advanced
265 
266     Concepts: mapping^local to global
267 
268 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
269 @*/
270 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
271 {
272   PetscErrorCode ierr;
273   PetscInt       n,bs;
274   const PetscInt *indices;
275   MPI_Comm       comm;
276   PetscBool      isblock;
277 
278   PetscFunctionBegin;
279   PetscValidHeaderSpecific(is,IS_CLASSID,1);
280   PetscValidPointer(mapping,2);
281 
282   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
283   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
284   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
285   if (!isblock) {
286     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
287     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
288     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
289   } else {
290     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
291     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
292     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
293     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
294   }
295   PetscFunctionReturn(0);
296 }
297 
298 /*@C
299     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
300     ordering and a global parallel ordering.
301 
302     Collective
303 
304     Input Parameter:
305 +   sf - star forest mapping contiguous local indices to (rank, offset)
306 -   start - first global index on this process
307 
308     Output Parameter:
309 .   mapping - new mapping data structure
310 
311     Level: advanced
312 
313     Concepts: mapping^local to global
314 
315 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
316 @*/
317 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
318 {
319   PetscErrorCode ierr;
320   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
321   const PetscInt *ilocal;
322   MPI_Comm       comm;
323 
324   PetscFunctionBegin;
325   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
326   PetscValidPointer(mapping,3);
327 
328   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
329   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
330   if (ilocal) {
331     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
332   }
333   else maxlocal = nleaves;
334   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
335   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
336   for (i=0; i<nroots; i++) globals[i] = start + i;
337   for (i=0; i<maxlocal; i++) ltog[i] = -1;
338   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
339   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
340   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
341   ierr = PetscFree(globals);CHKERRQ(ierr);
342   PetscFunctionReturn(0);
343 }
344 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Notes:
    The stored block indices are regrouped for the new block size: each new
    block of bs expanded indices must either be entirely negative (unmapped)
    or form a consecutive ascending run, otherwise an error is raised.
    All cached global-to-local and neighbor information is discarded and will
    be rebuilt lazily on next use.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;                 /* old number of block indices */
  obs = mapping->bs;                /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;                /* new number of block indices */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* GetIndices expands the block indices to the full pointwise index array */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* check the i-th new block: cn counts negative (unmapped) entries,
       and any two adjacent nonnegative entries must be consecutive */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    if (oid[i*bs+j] < 0) cn++;      /* the loop above stops at bs-1; account for the last entry */
    if (cn) {
      /* a block may be entirely negative (unmapped) but never mixed */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      nid[i] = oid[i*bs]/bs;        /* store a single block index per run */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* NOTE(review): row 0 is freed separately from rows 1..info_nproc-1,
       presumably because it is a distinct allocation -- confirm against the info setup code */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  if (mapping->ops->destroy) {
    /* drop the type-specific (basic/hash) global-to-local data; rebuilt on demand */
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
421 
422 /*@
423     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
424     ordering and a global parallel ordering.
425 
426     Not Collective
427 
428     Input Parameters:
429 .   mapping - mapping data structure
430 
431     Output Parameter:
432 .   bs - the blocksize
433 
434     Level: advanced
435 
436     Concepts: mapping^local to global
437 
438 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
439 @*/
440 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
441 {
442   PetscFunctionBegin;
443   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
444   *bs = mapping->bs;
445   PetscFunctionReturn(0);
446 }
447 
448 /*@
449     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
450     ordering and a global parallel ordering.
451 
452     Not Collective, but communicator may have more than one process
453 
454     Input Parameters:
455 +   comm - MPI communicator
456 .   bs - the block size
457 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
458 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
459 -   mode - see PetscCopyMode
460 
461     Output Parameter:
462 .   mapping - new mapping data structure
463 
464     Notes:
465     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
466 
467     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
468     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
469     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
470 
471     Level: advanced
472 
473     Concepts: mapping^local to global
474 
475 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
476           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
477 @*/
478 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
479 {
480   PetscErrorCode ierr;
481   PetscInt       *in;
482 
483   PetscFunctionBegin;
484   if (n) PetscValidIntPointer(indices,3);
485   PetscValidPointer(mapping,4);
486 
487   *mapping = NULL;
488   ierr = ISInitializePackage();CHKERRQ(ierr);
489 
490   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
491                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
492   (*mapping)->n             = n;
493   (*mapping)->bs            = bs;
494   (*mapping)->info_cached   = PETSC_FALSE;
495   (*mapping)->info_free     = PETSC_FALSE;
496   (*mapping)->info_procs    = NULL;
497   (*mapping)->info_numprocs = NULL;
498   (*mapping)->info_indices  = NULL;
499   (*mapping)->info_nodec    = NULL;
500   (*mapping)->info_nodei    = NULL;
501 
502   (*mapping)->ops->globaltolocalmappingapply      = NULL;
503   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
504   (*mapping)->ops->destroy                        = NULL;
505   if (mode == PETSC_COPY_VALUES) {
506     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
507     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
508     (*mapping)->indices = in;
509     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
510   } else if (mode == PETSC_OWN_POINTER) {
511     (*mapping)->indices = (PetscInt*)indices;
512     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
513   }
514   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
515   PetscFunctionReturn(0);
516 }
517 
/* Registry of available ISLocalToGlobalMapping implementations (basic, hash, ...);
   populated by ISLocalToGlobalMappingRegisterAll() */
PetscFunctionList ISLocalToGlobalMappingList = NULL;
519 
520 
/*@
   ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

   Not collective

   Input Parameters:
.  mapping - mapping data structure

   Options Database Key:
.  -islocaltoglobalmapping_type <basic,hash> - select the implementation

   Level: advanced

@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  /* PetscObjectOptionsBegin/PetscOptionsEnd must bracket the queries */
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* offer the current type as default when one is already set */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
550 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the final reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  /* cached neighbor information */
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* NOTE(review): row 0 is freed separately from rows 1..info_nproc-1,
       presumably because it is a distinct allocation -- confirm against the info setup code */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*mapping)->info_nodei);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_nodec);CHKERRQ(ierr);
  if ((*mapping)->ops->destroy) {
    /* free the type-specific (basic/hash) global-to-local data */
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
596 
597 /*@
598     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
599     a new index set using the global numbering defined in an ISLocalToGlobalMapping
600     context.
601 
602     Not collective
603 
604     Input Parameters:
605 +   mapping - mapping between local and global numbering
606 -   is - index set in local numbering
607 
608     Output Parameters:
609 .   newis - index set in global numbering
610 
611     Level: advanced
612 
613     Concepts: mapping^local to global
614 
615 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
616           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
617 @*/
618 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
619 {
620   PetscErrorCode ierr;
621   PetscInt       n,*idxout;
622   const PetscInt *idxin;
623 
624   PetscFunctionBegin;
625   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
626   PetscValidHeaderSpecific(is,IS_CLASSID,2);
627   PetscValidPointer(newis,3);
628 
629   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
630   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
631   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
632   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
633   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
634   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
635   PetscFunctionReturn(0);
636 }
637 
638 /*@
639    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
640    and converts them to the global numbering.
641 
642    Not collective
643 
644    Input Parameters:
645 +  mapping - the local to global mapping context
646 .  N - number of integers
647 -  in - input indices in local numbering
648 
649    Output Parameter:
650 .  out - indices in global numbering
651 
652    Notes:
653    The in and out array parameters may be identical.
654 
655    Level: advanced
656 
657 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
658           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
659           AOPetscToApplication(), ISGlobalToLocalMappingApply()
660 
661     Concepts: mapping^local to global
662 @*/
663 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
664 {
665   PetscInt i,bs,Nmax;
666 
667   PetscFunctionBegin;
668   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
669   bs   = mapping->bs;
670   Nmax = bs*mapping->n;
671   if (bs == 1) {
672     const PetscInt *idx = mapping->indices;
673     for (i=0; i<N; i++) {
674       if (in[i] < 0) {
675         out[i] = in[i];
676         continue;
677       }
678       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
679       out[i] = idx[in[i]];
680     }
681   } else {
682     const PetscInt *idx = mapping->indices;
683     for (i=0; i<N; i++) {
684       if (in[i] < 0) {
685         out[i] = in[i];
686         continue;
687       }
688       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
689       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
690     }
691   }
692   PetscFunctionReturn(0);
693 }
694 
695 /*@
696    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
697 
698    Not collective
699 
700    Input Parameters:
701 +  mapping - the local to global mapping context
702 .  N - number of integers
703 -  in - input indices in local block numbering
704 
705    Output Parameter:
706 .  out - indices in global block numbering
707 
708    Notes:
709    The in and out array parameters may be identical.
710 
711    Example:
712      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
713      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
714 
715    Level: advanced
716 
717 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
718           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
719           AOPetscToApplication(), ISGlobalToLocalMappingApply()
720 
721     Concepts: mapping^local to global
722 @*/
723 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
724 {
725 
726   PetscFunctionBegin;
727   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
728   {
729     PetscInt i,Nmax = mapping->n;
730     const PetscInt *idx = mapping->indices;
731 
732     for (i=0; i<N; i++) {
733       if (in[i] < 0) {
734         out[i] = in[i];
735         continue;
736       }
737       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
738       out[i] = idx[in[i]];
739     }
740   }
741   PetscFunctionReturn(0);
742 }
743 
744 /*@
745     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
746     specified with a global numbering.
747 
748     Not collective
749 
750     Input Parameters:
751 +   mapping - mapping between local and global numbering
752 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
753            IS_GTOLM_DROP - drops the indices with no local value from the output list
754 .   n - number of global indices to map
755 -   idx - global indices to map
756 
757     Output Parameters:
758 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
759 -   idxout - local index of each global index, one must pass in an array long enough
760              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
761              idxout == NULL to determine the required length (returned in nout)
762              and then allocate the required space and call ISGlobalToLocalMappingApply()
763              a second time to set the values.
764 
765     Notes:
766     Either nout or idxout may be NULL. idx and idxout may be identical.
767 
768     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
769     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
770     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
771 
772     Level: advanced
773 
774     Developer Note: The manual page states that idx and idxout may be identical but the calling
775        sequence declares idx as const so it cannot be the same as idxout.
776 
777     Concepts: mapping^global to local
778 
779 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
780           ISLocalToGlobalMappingDestroy()
781 @*/
782 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
783 {
784   PetscErrorCode ierr;
785 
786   PetscFunctionBegin;
787   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
788   if (!mapping->data) {
789     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
790   }
791   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
792   PetscFunctionReturn(0);
793 }
794 
795 /*@
796     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
797     a new index set using the local numbering defined in an ISLocalToGlobalMapping
798     context.
799 
800     Not collective
801 
802     Input Parameters:
803 +   mapping - mapping between local and global numbering
804 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
805            IS_GTOLM_DROP - drops the indices with no local value from the output list
806 -   is - index set in global numbering
807 
808     Output Parameters:
809 .   newis - index set in local numbering
810 
811     Level: advanced
812 
813     Concepts: mapping^local to global
814 
815 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
816           ISLocalToGlobalMappingDestroy()
817 @*/
818 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
819 {
820   PetscErrorCode ierr;
821   PetscInt       n,nout,*idxout;
822   const PetscInt *idxin;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
826   PetscValidHeaderSpecific(is,IS_CLASSID,3);
827   PetscValidPointer(newis,4);
828 
829   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
830   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
831   if (type == IS_GTOLM_MASK) {
832     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
833   } else {
834     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
835     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
836   }
837   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
838   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
839   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
840   PetscFunctionReturn(0);
841 }
842 
843 /*@
844     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
845     specified with a block global numbering.
846 
847     Not collective
848 
849     Input Parameters:
850 +   mapping - mapping between local and global numbering
851 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
852            IS_GTOLM_DROP - drops the indices with no local value from the output list
853 .   n - number of global indices to map
854 -   idx - global indices to map
855 
856     Output Parameters:
857 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
858 -   idxout - local index of each global index, one must pass in an array long enough
859              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
860              idxout == NULL to determine the required length (returned in nout)
861              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
862              a second time to set the values.
863 
864     Notes:
865     Either nout or idxout may be NULL. idx and idxout may be identical.
866 
867     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
868     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
869     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
870 
871     Level: advanced
872 
873     Developer Note: The manual page states that idx and idxout may be identical but the calling
874        sequence declares idx as const so it cannot be the same as idxout.
875 
876     Concepts: mapping^global to local
877 
878 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
879           ISLocalToGlobalMappingDestroy()
880 @*/
881 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
882                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
883 {
884   PetscErrorCode ierr;
885 
886   PetscFunctionBegin;
887   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
888   if (!mapping->data) {
889     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
890   }
891   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
892   PetscFunctionReturn(0);
893 }
894 
895 
896 /*@C
897     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
898      each index shared by more than one processor
899 
900     Collective on ISLocalToGlobalMapping
901 
902     Input Parameters:
903 .   mapping - the mapping from local to global indexing
904 
905     Output Parameter:
906 +   nproc - number of processors that are connected to this one
907 .   proc - neighboring processors
908 .   numproc - number of indices for each subdomain (processor)
909 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
910 
911     Level: advanced
912 
913     Concepts: mapping^local to global
914 
915     Fortran Usage:
916 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
917 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
918           PetscInt indices[nproc][numprocmax],ierr)
919         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
920         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
921 
922 
923 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
924           ISLocalToGlobalMappingRestoreInfo()
925 @*/
926 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
927 {
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
932   if (mapping->info_cached) {
933     *nproc    = mapping->info_nproc;
934     *procs    = mapping->info_procs;
935     *numprocs = mapping->info_numprocs;
936     *indices  = mapping->info_indices;
937   } else {
938     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
939   }
940   PetscFunctionReturn(0);
941 }
942 
/*
   Builds the neighbor information for a mapping via a parallel rendezvous:
   for every (block) index that appears on more than one rank, determine the
   set of ranks sharing it and the local position at which it lives here.

   Three rounds of point-to-point messages (tags tag1/tag2/tag3):
     1. every rank sends each (global index, local position) pair to the rank
        that "globally owns" that index, where ownership is defined by cutting
        the global range [0,Ng) into contiguous slabs of size scale (tag1);
     2. each owner counts contributors per index; indices with more than one
        contributor are shared.  The owner announces reply-message lengths to
        each contributor (tag2);
     3. the owner sends each contributor, for every shared index it
        contributed, the local position, the number of sharing ranks, and the
        list of those ranks (tag3).
   Finally the received lists are regrouped per neighbor rank, sorted by
   global index, this rank's own entry is moved to slot 0, and the result is
   cached on the mapping for reuse.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; return empty (single-entry, zeroed) arrays */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = one more than the largest global index appearing on any rank */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* this rank "globally owns" the slab [rstart,rstart+ng); the last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = #indices destined for rank p; nprocs[2*p+1] = 1 iff a message goes to p */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message holds up to nmax (global index, local position) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  /* starts[p] = offset in sends[] of rank p's segment (2 entries per node);
     the pass over the nodes advances starts[], so it is rebuilt afterwards */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  /* dest[] remembers, per issued send, which rank it went to (reused in round 2/3) */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  /* source[i]/len[i] = sender rank and pair count of the i-th received message */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  /* starts[] is a CSR-style offset array over the nontrivial owned indices */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced above; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* rebuild starts[] once more (it was advanced while filling ownedsenders) */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* one reply per rank that contributed in round 1; nprocs[] is now reused
     to hold each reply's length in entries */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is nprocs[i] entries long, and consists of
       (0) the number of nodes being sent back
     followed, for each nontrivial node, by
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  /* we get exactly one reply from each rank we sent to in round 1 (dest[]) */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* starts3[] = displacement of each incoming payload in recvs2[]; nt = total length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs[] is reused again: nprocs[p] = #shared nodes this rank has in common with rank p */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[p] = position of rank p in the compressed output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1352 
1353 /*@C
1354     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1355 
1356     Collective on ISLocalToGlobalMapping
1357 
1358     Input Parameters:
1359 .   mapping - the mapping from local to global indexing
1360 
1361     Output Parameter:
1362 +   nproc - number of processors that are connected to this one
1363 .   proc - neighboring processors
1364 .   numproc - number of indices for each processor
1365 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1366 
1367     Level: advanced
1368 
1369 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1370           ISLocalToGlobalMappingGetInfo()
1371 @*/
1372 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1373 {
1374   PetscErrorCode ierr;
1375 
1376   PetscFunctionBegin;
1377   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1378   if (mapping->info_free) {
1379     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1380     if (*indices) {
1381       PetscInt i;
1382 
1383       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1384       for (i=1; i<*nproc; i++) {
1385         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1386       }
1387       ierr = PetscFree(*indices);CHKERRQ(ierr);
1388     }
1389   }
1390   *nproc    = 0;
1391   *procs    = NULL;
1392   *numprocs = NULL;
1393   *indices  = NULL;
1394   PetscFunctionReturn(0);
1395 }
1396 
1397 /*@C
1398     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1399      each index shared by more than one processor
1400 
1401     Collective on ISLocalToGlobalMapping
1402 
1403     Input Parameters:
1404 .   mapping - the mapping from local to global indexing
1405 
1406     Output Parameter:
1407 +   nproc - number of processors that are connected to this one
1408 .   proc - neighboring processors
1409 .   numproc - number of indices for each subdomain (processor)
1410 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1411 
1412     Level: advanced
1413 
1414     Concepts: mapping^local to global
1415 
1416     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1417 
1418     Fortran Usage:
1419 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1420 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1421           PetscInt indices[nproc][numprocmax],ierr)
1422         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1423         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1424 
1425 
1426 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1427           ISLocalToGlobalMappingRestoreInfo()
1428 @*/
PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* obtain (and cache) the neighbor information in terms of block indices */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
  if (bs > 1) { /* we need to expand the cached info */
    ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
    ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
    for (i=0; i<*nproc; i++) {
      /* each shared block index b expands to the bs point indices bs*b+k, k = 0..bs-1 */
      ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
      for (j=0; j<bnumprocs[i]; j++) {
        for (k=0; k<bs; k++) {
          (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
        }
      }
      (*numprocs)[i] = bnumprocs[i]*bs;
    }
    /* mark the expanded copies so ISLocalToGlobalMappingRestoreInfo() frees
       them (the cached block arrays from GetBlockInfo are left alone).
       NOTE(review): the flag lives on the mapping rather than on the returned
       arrays, so a subsequent RestoreBlockInfo() on the cached block info
       would also see it set -- verify the intended ownership protocol */
    mapping->info_free = PETSC_TRUE;
  } else {
    /* bs == 1: block info and point info coincide; hand out the cached arrays */
    *numprocs = bnumprocs;
    *indices  = bindices;
  }
  PetscFunctionReturn(0);
}
1456 
1457 /*@C
1458     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1459 
1460     Collective on ISLocalToGlobalMapping
1461 
1462     Input Parameters:
1463 .   mapping - the mapping from local to global indexing
1464 
1465     Output Parameter:
1466 +   nproc - number of processors that are connected to this one
1467 .   proc - neighboring processors
1468 .   numproc - number of indices for each processor
1469 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1470 
1471     Level: advanced
1472 
1473 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1474           ISLocalToGlobalMappingGetInfo()
1475 @*/
1476 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1477 {
1478   PetscErrorCode ierr;
1479 
1480   PetscFunctionBegin;
1481   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1482   PetscFunctionReturn(0);
1483 }
1484 
1485 /*@C
1486     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1487 
1488     Collective on ISLocalToGlobalMapping
1489 
1490     Input Parameters:
1491 .   mapping - the mapping from local to global indexing
1492 
1493     Output Parameter:
1494 +   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
1495 .   count - number of neighboring processors per node
1496 -   indices - indices of processes sharing the node (sorted)
1497 
1498     Level: advanced
1499 
1500     Concepts: mapping^local to global
1501 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1503 
1504 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1505           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1506 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* convert the per-neighbor shared-index lists from GetInfo into per-node
       lists of sharing ranks, stored CSR-style in one contiguous buffer */
    ierr = PetscCalloc1(n+1,&mapping->info_nodec);CHKERRQ(ierr); /* always allocate to flag setup */
    ierr = PetscMalloc1(n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count entries per node (1 for this rank plus one per
       sharing neighbor); m accumulates the total buffer length */
    for (i=0,m=0;i<n;i++) { mapping->info_nodec[i] = 1; m++; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* one backing allocation; info_nodei[i] are pointers into it */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    ierr = PetscMemzero(mapping->info_nodec,n*sizeof(PetscInt));CHKERRQ(ierr);
    /* second pass: fill the lists; neigh[0] is this rank (GetBlockInfo moves
       the calling rank to slot 0).  NOTE(review): if n_neigh is 0, neigh is
       NULL and neigh[0] below dereferences it when n > 0 -- confirm this
       routine is never reached in that state (e.g. on a single rank) */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and remove duplicates (updates the counts) */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all three outputs are optional; the returned arrays are cached on the mapping */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1550 
1551 /*@C
1552     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1553 
1554     Collective on ISLocalToGlobalMapping
1555 
1556     Input Parameters:
1557 .   mapping - the mapping from local to global indexing
1558 
1559     Output Parameter:
1560 +   nnodes - number of local nodes
1561 .   count - number of neighboring processors per node
1562 -   indices - indices of processes sharing the node (sorted)
1563 
1564     Level: advanced
1565 
1566 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1567           ISLocalToGlobalMappingGetInfo()
1568 @*/
1569 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1570 {
1571   PetscFunctionBegin;
1572   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1573   if (nnodes)  *nnodes  = 0;
1574   if (count)   *count   = NULL;
1575   if (indices) *indices = NULL;
1576   PetscFunctionReturn(0);
1577 }
1578 
1579 /*@C
1580    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1581 
1582    Not Collective
1583 
1584    Input Arguments:
1585 . ltog - local to global mapping
1586 
1587    Output Arguments:
1588 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1589 
1590    Level: advanced
1591 
1592    Notes:
1593     ISLocalToGlobalMappingGetSize() returns the length the this array
1594 
1595 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1596 @*/
1597 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1598 {
1599   PetscFunctionBegin;
1600   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1601   PetscValidPointer(array,2);
1602   if (ltog->bs == 1) {
1603     *array = ltog->indices;
1604   } else {
1605     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1606     const PetscInt *ii;
1607     PetscErrorCode ierr;
1608 
1609     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1610     *array = jj;
1611     k    = 0;
1612     ii   = ltog->indices;
1613     for (i=0; i<n; i++)
1614       for (j=0; j<bs; j++)
1615         jj[k++] = bs*ii[i] + j;
1616   }
1617   PetscFunctionReturn(0);
1618 }
1619 
1620 /*@C
1621    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1622 
1623    Not Collective
1624 
1625    Input Arguments:
1626 + ltog - local to global mapping
1627 - array - array of indices
1628 
1629    Level: advanced
1630 
1631 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1632 @*/
1633 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1634 {
1635   PetscFunctionBegin;
1636   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1637   PetscValidPointer(array,2);
1638   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1639 
1640   if (ltog->bs > 1) {
1641     PetscErrorCode ierr;
1642     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1643   }
1644   PetscFunctionReturn(0);
1645 }
1646 
1647 /*@C
1648    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1649 
1650    Not Collective
1651 
1652    Input Arguments:
1653 . ltog - local to global mapping
1654 
1655    Output Arguments:
1656 . array - array of indices
1657 
1658    Level: advanced
1659 
1660 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1661 @*/
1662 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1663 {
1664   PetscFunctionBegin;
1665   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1666   PetscValidPointer(array,2);
1667   *array = ltog->indices;
1668   PetscFunctionReturn(0);
1669 }
1670 
1671 /*@C
1672    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1673 
1674    Not Collective
1675 
1676    Input Arguments:
1677 + ltog - local to global mapping
1678 - array - array of indices
1679 
1680    Level: advanced
1681 
1682 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1683 @*/
1684 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1685 {
1686   PetscFunctionBegin;
1687   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1688   PetscValidPointer(array,2);
1689   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1690   *array = NULL;
1691   PetscFunctionReturn(0);
1692 }
1693 
1694 /*@C
1695    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1696 
1697    Not Collective
1698 
1699    Input Arguments:
1700 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1701 . n - number of mappings to concatenate
1702 - ltogs - local to global mappings
1703 
1704    Output Arguments:
1705 . ltogcat - new mapping
1706 
1707    Note: this currently always returns a mapping with block size of 1
1708 
1709    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1710 
1711    Level: advanced
1712 
1713 .seealso: ISLocalToGlobalMappingCreate()
1714 @*/
1715 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1716 {
1717   PetscInt       i,cnt,m,*idx;
1718   PetscErrorCode ierr;
1719 
1720   PetscFunctionBegin;
1721   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1722   if (n > 0) PetscValidPointer(ltogs,3);
1723   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1724   PetscValidPointer(ltogcat,4);
1725   for (cnt=0,i=0; i<n; i++) {
1726     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1727     cnt += m;
1728   }
1729   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1730   for (cnt=0,i=0; i<n; i++) {
1731     const PetscInt *subidx;
1732     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1733     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1734     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1735     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1736     cnt += m;
1737   }
1738   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1739   PetscFunctionReturn(0);
1740 }
1741 
1742 /*MC
1743       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1744                                     used this is good for only small and moderate size problems.
1745 
1746    Options Database Keys:
1747 +   -islocaltoglobalmapping_type basic - select this method
1748 
1749    Level: beginner
1750 
1751 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1752 M*/
1753 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1754 {
1755   PetscFunctionBegin;
1756   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1757   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1758   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1759   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1760   PetscFunctionReturn(0);
1761 }
1762 
1763 /*MC
1764       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1765                                     used this is good for large memory problems.
1766 
1767    Options Database Keys:
1768 +   -islocaltoglobalmapping_type hash - select this method
1769 
1770 
1771    Notes:
1772     This is selected automatically for large problems if the user does not set the type.
1773 
1774    Level: beginner
1775 
1776 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1777 M*/
1778 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1779 {
1780   PetscFunctionBegin;
1781   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1782   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1783   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1784   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1785   PetscFunctionReturn(0);
1786 }
1787 
1788 
1789 /*@C
1790     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1791 
1792    Not Collective
1793 
1794    Input Parameters:
1795 +  sname - name of a new method
1796 -  routine_create - routine to create method context
1797 
1798    Notes:
1799    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1800 
1801    Sample usage:
1802 .vb
1803    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1804 .ve
1805 
1806    Then, your mapping can be chosen with the procedural interface via
1807 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1808    or at runtime via the option
1809 $     -islocaltoglobalmapping_type my_mapper
1810 
1811    Level: advanced
1812 
1813 .keywords: ISLocalToGlobalMappingType, register
1814 
1815 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1816 
1817 @*/
1818 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1819 {
1820   PetscErrorCode ierr;
1821 
1822   PetscFunctionBegin;
1823   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1824   PetscFunctionReturn(0);
1825 }
1826 
1827 /*@C
1828    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1829 
1830    Logically Collective on ISLocalToGlobalMapping
1831 
1832    Input Parameters:
1833 +  ltog - the ISLocalToGlobalMapping object
1834 -  type - a known method
1835 
1836    Options Database Key:
1837 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1838     of available methods (for instance, basic or hash)
1839 
1840    Notes:
1841    See "petsc/include/petscis.h" for available methods
1842 
1843   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1844   then set the ISLocalToGlobalMapping type from the options database rather than by using
1845   this routine.
1846 
1847   Level: intermediate
1848 
1849   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1850   are accessed by ISLocalToGlobalMappingSetType().
1851 
1852 .keywords: ISLocalToGlobalMapping, set, method
1853 
1854 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1855 
1856 @*/
1857 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1858 {
1859   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1860   PetscBool      match;
1861 
1862   PetscFunctionBegin;
1863   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1864   PetscValidCharPointer(type,2);
1865 
1866   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1867   if (match) PetscFunctionReturn(0);
1868 
1869   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1870   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1871   /* Destroy the previous private LTOG context */
1872   if (ltog->ops->destroy) {
1873     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1874     ltog->ops->destroy = NULL;
1875   }
1876   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1877   ierr = (*r)(ltog);CHKERRQ(ierr);
1878   PetscFunctionReturn(0);
1879 }
1880 
1881 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1882 
1883 /*@C
1884   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1885 
1886   Not Collective
1887 
1888   Level: advanced
1889 
1890 .keywords: IS, register, all
1891 .seealso:  ISRegister(),  ISLocalToGlobalRegister()
1892 @*/
1893 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1894 {
1895   PetscErrorCode ierr;
1896 
1897   PetscFunctionBegin;
1898   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1899   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1900 
1901   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
1902   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
1903   PetscFunctionReturn(0);
1904 }
1905 
1906