xref: /petsc/src/vec/is/utils/isltog.c (revision 82b42fd2d4e4dd3a781138888337060766935908)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 /*@
10     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
11 
12     Not Collective
13 
14     Input Parameter:
15 .   ltog - local to global mapping
16 
17     Output Parameter:
18 .   nltog - the duplicated local to global mapping
19 
20     Level: advanced
21 
22     Concepts: mapping^local to global
23 
24 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
25 @*/
26 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
27 {
28   PetscErrorCode ierr;
29 
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
32   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
33   PetscFunctionReturn(0);
34 }
35 
/*@
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

    Not Collective

    Input Parameter:
.   mapping - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* total local size = block size times the number of block indices */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
61 
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   mapping - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to the standard-out viewer on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  /* only an ASCII rendering is implemented; other viewer types are silently ignored */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so each rank's [rank] local global lines appear in rank order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
104 
105 /*@
106     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
107     ordering and a global parallel ordering.
108 
109     Not collective
110 
111     Input Parameter:
112 .   is - index set containing the global numbers for each local number
113 
114     Output Parameter:
115 .   mapping - new mapping data structure
116 
117     Notes: the block size of the IS determines the block size of the mapping
118     Level: advanced
119 
120     Concepts: mapping^local to global
121 
122 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
123 @*/
124 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
125 {
126   PetscErrorCode ierr;
127   PetscInt       n,bs;
128   const PetscInt *indices;
129   MPI_Comm       comm;
130   PetscBool      isblock;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecific(is,IS_CLASSID,1);
134   PetscValidPointer(mapping,2);
135 
136   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
137   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
138   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
139   if (!isblock) {
140     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
141     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
142     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
143   } else {
144     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
145     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
146     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
147     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
148   }
149   PetscFunctionReturn(0);
150 }
151 
152 /*@C
153     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
154     ordering and a global parallel ordering.
155 
156     Collective
157 
158     Input Parameter:
159 +   sf - star forest mapping contiguous local indices to (rank, offset)
160 -   start - first global index on this process
161 
162     Output Parameter:
163 .   mapping - new mapping data structure
164 
165     Level: advanced
166 
167     Concepts: mapping^local to global
168 
169 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
170 @*/
171 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
172 {
173   PetscErrorCode ierr;
174   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
175   const PetscInt *ilocal;
176   MPI_Comm       comm;
177 
178   PetscFunctionBegin;
179   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
180   PetscValidPointer(mapping,3);
181 
182   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
183   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
184   if (ilocal) {
185     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
186   }
187   else maxlocal = nleaves;
188   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
189   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
190   for (i=0; i<nroots; i++) globals[i] = start + i;
191   for (i=0; i<maxlocal; i++) ltog[i] = -1;
192   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
193   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
194   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
195   ierr = PetscFree(globals);CHKERRQ(ierr);
196   PetscFunctionReturn(0);
197 }
198 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;            /* new block indices, one per new block */
  const PetscInt *oid;            /* expanded (unblocked) indices of the current mapping */
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;  /* NOTE(review): dead store — overwritten by ISLocalToGlobalMappingGetIndices() below */
  nn  = (on*obs)/bs;       /* number of new blocks: total local size divided by the new block size */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* each new block must be either fully negative (unmapped) or bs consecutive indices;
       cn counts the negative entries within the block */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* account for the last entry of the block, not covered by the loop above */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* partially negative blocks are invalid: either all bs entries are negative or none */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      /* the new block index is the first expanded index divided by the new block size */
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  /* install the new blocking and invalidate the cached global-to-local data */
  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  PetscFunctionReturn(0);
}
258 
259 /*@
260     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
261     ordering and a global parallel ordering.
262 
263     Not Collective
264 
265     Input Parameters:
266 .   mapping - mapping data structure
267 
268     Output Parameter:
269 .   bs - the blocksize
270 
271     Level: advanced
272 
273     Concepts: mapping^local to global
274 
275 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
276 @*/
277 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
278 {
279   PetscFunctionBegin;
280   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
281   *bs = mapping->bs;
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
287     ordering and a global parallel ordering.
288 
289     Not Collective, but communicator may have more than one process
290 
291     Input Parameters:
292 +   comm - MPI communicator
293 .   bs - the block size
294 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
295 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
296 -   mode - see PetscCopyMode
297 
298     Output Parameter:
299 .   mapping - new mapping data structure
300 
301     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
302     Level: advanced
303 
304     Concepts: mapping^local to global
305 
306 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
307 @*/
308 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
309 {
310   PetscErrorCode ierr;
311   PetscInt       *in;
312 
313   PetscFunctionBegin;
314   if (n) PetscValidIntPointer(indices,3);
315   PetscValidPointer(mapping,4);
316 
317   *mapping = NULL;
318   ierr = ISInitializePackage();CHKERRQ(ierr);
319 
320   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
321                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
322   (*mapping)->n             = n;
323   (*mapping)->bs            = bs;
324   (*mapping)->info_cached   = PETSC_FALSE;
325   (*mapping)->info_free     = PETSC_FALSE;
326   (*mapping)->info_procs    = NULL;
327   (*mapping)->info_numprocs = NULL;
328   (*mapping)->info_indices  = NULL;
329   /*
330     Do not create the global to local mapping. This is only created if
331     ISGlobalToLocalMapping() is called
332   */
333   (*mapping)->globals        = 0;
334   (*mapping)->globalht       = 0;
335   (*mapping)->use_hash_table = PETSC_FALSE;
336   ierr = ISLocalToGlobalMappingSetFromOptions(*mapping);CHKERRQ(ierr);
337   if (mode == PETSC_COPY_VALUES) {
338     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
339     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
340     (*mapping)->indices = in;
341     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
342   } else if (mode == PETSC_OWN_POINTER) {
343     (*mapping)->indices = (PetscInt*)indices;
344     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
345   }
346   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
347   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
348   PetscFunctionReturn(0);
349 }
350 
351 /*@
352    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
353 
354    Not collective
355 
356    Input Parameters:
357 .  mapping - mapping data structure
358 
359    Options Database:
360 .   -ltogm_use_hash_table true,false see ISLocalToGlobalMappingSetUseHashTable()
361 
362    Level: advanced
363 
364 .seealso: ISLocalToGlobalMappingSetUseHashTable()
365 @*/
366 
367 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
368 {
369   PetscErrorCode ierr;
370   PetscBool      flg;
371 
372   PetscFunctionBegin;
373   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
374 
375   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
376 
377   ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&flg);CHKERRQ(ierr);
378   ierr = PetscOptionsBool("-ltogm_use_hash_table", "use hash table (memory scalable for global to local in ISLtoGMappings","ISLocalToGlobalMappingSetUseHashTable",flg,&flg,NULL);CHKERRQ(ierr);
379   ierr = ISLocalToGlobalMappingSetUseHashTable(mapping,flg);CHKERRQ(ierr);
380   ierr = PetscOptionsEnd();CHKERRQ(ierr);
381   PetscFunctionReturn(0);
382 }
383 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* reference counted: only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  PetscHashIDestroy((*mapping)->globalht);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed unconditionally, then the remaining per-process arrays */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
423 
424 /*@
425     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
426     a new index set using the global numbering defined in an ISLocalToGlobalMapping
427     context.
428 
429     Not collective
430 
431     Input Parameters:
432 +   mapping - mapping between local and global numbering
433 -   is - index set in local numbering
434 
435     Output Parameters:
436 .   newis - index set in global numbering
437 
438     Level: advanced
439 
440     Concepts: mapping^local to global
441 
442 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
443           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
444 @*/
445 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
446 {
447   PetscErrorCode ierr;
448   PetscInt       n,*idxout;
449   const PetscInt *idxin;
450 
451   PetscFunctionBegin;
452   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
453   PetscValidHeaderSpecific(is,IS_CLASSID,2);
454   PetscValidPointer(newis,3);
455 
456   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
457   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
458   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
459   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
460   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
461   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
462   PetscFunctionReturn(0);
463 }
464 
465 /*@
466    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
467    and converts them to the global numbering.
468 
469    Not collective
470 
471    Input Parameters:
472 +  mapping - the local to global mapping context
473 .  N - number of integers
474 -  in - input indices in local numbering
475 
476    Output Parameter:
477 .  out - indices in global numbering
478 
479    Notes:
480    The in and out array parameters may be identical.
481 
482    Level: advanced
483 
484 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
485           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
486           AOPetscToApplication(), ISGlobalToLocalMappingApply()
487 
488     Concepts: mapping^local to global
489 @*/
490 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
491 {
492   PetscInt i,bs,Nmax;
493 
494   PetscFunctionBegin;
495   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
496   bs   = mapping->bs;
497   Nmax = bs*mapping->n;
498   if (bs == 1) {
499     const PetscInt *idx = mapping->indices;
500     for (i=0; i<N; i++) {
501       if (in[i] < 0) {
502         out[i] = in[i];
503         continue;
504       }
505       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
506       out[i] = idx[in[i]];
507     }
508   } else {
509     const PetscInt *idx = mapping->indices;
510     for (i=0; i<N; i++) {
511       if (in[i] < 0) {
512         out[i] = in[i];
513         continue;
514       }
515       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
516       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
517     }
518   }
519   PetscFunctionReturn(0);
520 }
521 
522 /*@
523    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
524 
525    Not collective
526 
527    Input Parameters:
528 +  mapping - the local to global mapping context
529 .  N - number of integers
530 -  in - input indices in local block numbering
531 
532    Output Parameter:
533 .  out - indices in global block numbering
534 
535    Notes:
536    The in and out array parameters may be identical.
537 
538    Example:
539      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
540      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
541 
542    Level: advanced
543 
544 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
545           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
546           AOPetscToApplication(), ISGlobalToLocalMappingApply()
547 
548     Concepts: mapping^local to global
549 @*/
550 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
551 {
552 
553   PetscFunctionBegin;
554   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
555   {
556     PetscInt i,Nmax = mapping->n;
557     const PetscInt *idx = mapping->indices;
558 
559     for (i=0; i<N; i++) {
560       if (in[i] < 0) {
561         out[i] = in[i];
562         continue;
563       }
564       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
565       out[i] = idx[in[i]];
566     }
567   }
568   PetscFunctionReturn(0);
569 }
570 
571 /* -----------------------------------------------------------------------------------------*/
572 
573 /*
574     Creates the global fields in the ISLocalToGlobalMapping structure
575 */
576 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
577 {
578   PetscErrorCode ierr;
579   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
580   PetscBool      use_hash_table;
581 
582   PetscFunctionBegin;
583   ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&use_hash_table);CHKERRQ(ierr);
584   if (use_hash_table && mapping->globalht) PetscFunctionReturn(0);
585   else if (mapping->globals) PetscFunctionReturn(0);
586   end   = 0;
587   start = PETSC_MAX_INT;
588 
589   for (i=0; i<n; i++) {
590     if (idx[i] < 0) continue;
591     if (idx[i] < start) start = idx[i];
592     if (idx[i] > end)   end   = idx[i];
593   }
594   if (start > end) {start = 0; end = -1;}
595   mapping->globalstart = start;
596   mapping->globalend   = end;
597 
598   if (mapping->use_hash_table) {
599     PetscHashICreate(mapping->globalht);
600     for (i=0; i<n; i++ ) {
601       if (idx[i] < 0) continue;
602       PetscHashIAdd(mapping->globalht, idx[i], i);
603     }
604     ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
605   } else {
606     ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
607     mapping->globals = globals;
608     for (i=0; i<end-start+1; i++) globals[i] = -1;
609     for (i=0; i<n; i++) {
610       if (idx[i] < 0) continue;
611       globals[idx[i] - start] = i;
612     }
613 
614     ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
615   }
616   PetscFunctionReturn(0);
617 }
618 
619 /*@
620     ISLocalToGlobalMappingSetUseHashTable - Should global to local lookups use a hash table?
621 
622     This is memory-scalable, but slightly slower, implementation than the default flat array.
623 
624     Not collective
625 
626     Input parameters:
627 +   mapping - mapping between local and global numbering
628 -   flg - use a hash table?
629 
630     Level: advanced
631 
632 .seealso: ISGlobalToLocalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingSetFromOptions(), ISLocalToGlobalMappingGetUseHashTable()
633 
634 */
635 PetscErrorCode ISLocalToGlobalMappingSetUseHashTable(ISLocalToGlobalMapping mapping, PetscBool flg)
636 {
637   PetscFunctionBegin;
638 
639   mapping->use_hash_table = flg;
640 
641   PetscFunctionReturn(0);
642 }
643 
644 /*@
645     ISLocalToGlobalMappingGetUseHashTable - Do global to local lookups use a hash table?
646 
647     This is memory-scalable, but slightly slower, implementation than the default flat array.
648 
649     Not collective
650 
651     Input parameter:
652 +   mapping - mapping between local and global numbering
653 
654     Output parameter:
655 +   flg - does the mapping use a hash table?
656 
657     Level: advanced
658 
659 .seealso: ISGlobalToLocalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingSetFromOptions(), ISLocalToGlobalMappingSetUseHashTable()
660 */
661 PetscErrorCode ISLocalToGlobalMappingGetUseHashTable(ISLocalToGlobalMapping mapping, PetscBool *flg)
662 {
663   PetscFunctionBegin;
664 
665   *flg = mapping->use_hash_table;
666 
667   PetscFunctionReturn(0);
668 }
669 
670 /*@
671     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
672     specified with a global numbering.
673 
674     Not collective
675 
676     Input Parameters:
677 +   mapping - mapping between local and global numbering
678 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
679            IS_GTOLM_DROP - drops the indices with no local value from the output list
680 .   n - number of global indices to map
681 -   idx - global indices to map
682 
683     Output Parameters:
684 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
685 -   idxout - local index of each global index, one must pass in an array long enough
686              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
687              idxout == NULL to determine the required length (returned in nout)
688              and then allocate the required space and call ISGlobalToLocalMappingApply()
689              a second time to set the values.
690 
691     Notes:
692     Either nout or idxout may be NULL. idx and idxout may be identical.
693 
694     Unless the option -ltogm_use_hash_table is used, this is not scalable in memory usage. Each processor requires
695     O(Nglobal) size array to compute these.
696 
697     Level: advanced
698 
699     Developer Note: The manual page states that idx and idxout may be identical but the calling
700        sequence declares idx as const so it cannot be the same as idxout.
701 
702     Concepts: mapping^global to local
703 
704 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
705           ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingGetUseHashTable(), ISLocalToGlobalMappingSetUseHashTable()
706 @*/
707 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
708                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
709 {
710   PetscInt       i,*globals,nf = 0,tmp,start,end,bs,local;
711   PetscBool      use_hash_table;
712   PetscErrorCode ierr;
713 
714   PetscFunctionBegin;
715   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
716   ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
717   ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&use_hash_table);CHKERRQ(ierr);
718   globals = mapping->globals;
719   start   = mapping->globalstart;
720   end     = mapping->globalend;
721   bs      = mapping->bs;
722 
723 #define GTOL(g, local) do {                        \
724     if (use_hash_table) {                          \
725       PetscHashIMap(mapping->globalht,g/bs,local); \
726     } else {                                       \
727       local = globals[g/bs - start];               \
728     }                                              \
729     local = bs*local + (g % bs);                   \
730   } while (0)
731 
732   if (type == IS_GTOLM_MASK) {
733     if (idxout) {
734       for (i=0; i<n; i++) {
735         if (idx[i] < 0)                   idxout[i] = idx[i];
736         else if (idx[i] < bs*start)       idxout[i] = -1;
737         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
738         else                              GTOL(idx[i], local);
739       }
740     }
741     if (nout) *nout = n;
742   } else {
743     if (idxout) {
744       for (i=0; i<n; i++) {
745         if (idx[i] < 0) continue;
746         if (idx[i] < bs*start) continue;
747         if (idx[i] > bs*(end+1)-1) continue;
748         GTOL(idx[i], tmp);
749         if (tmp < 0) continue;
750         idxout[nf++] = tmp;
751       }
752     } else {
753       for (i=0; i<n; i++) {
754         if (idx[i] < 0) continue;
755         if (idx[i] < bs*start) continue;
756         if (idx[i] > bs*(end+1)-1) continue;
757         GTOL(idx[i], tmp);
758         if (tmp < 0) continue;
759         nf++;
760       }
761     }
762     if (nout) *nout = nf;
763   }
764 #undef GTOL
765   PetscFunctionReturn(0);
766 }
767 
/*@
    ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
    a new index set using the local numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
-   is - index set in global numbering

    Output Parameters:
.   newis - index set in local numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* MASK keeps every entry, so the output length equals the input length */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* DROP: first call with idxout == NULL to count how many entries survive */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
813 
814 /*@
815     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
816     specified with a block global numbering.
817 
818     Not collective
819 
820     Input Parameters:
821 +   mapping - mapping between local and global numbering
822 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
823            IS_GTOLM_DROP - drops the indices with no local value from the output list
824 .   n - number of global indices to map
825 -   idx - global indices to map
826 
827     Output Parameters:
828 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
829 -   idxout - local index of each global index, one must pass in an array long enough
830              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
831              idxout == NULL to determine the required length (returned in nout)
832              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
833              a second time to set the values.
834 
835     Notes:
836     Either nout or idxout may be NULL. idx and idxout may be identical.
837 
838     Unless the option -ltogm_use_hash_table is used, this is not scalable in memory usage. Each processor requires
839     O(Nglobal) size array to compute these.
840 
841     Level: advanced
842 
843     Developer Note: The manual page states that idx and idxout may be identical but the calling
844        sequence declares idx as const so it cannot be the same as idxout.
845 
846     Concepts: mapping^global to local
847 
848 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
849           ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingGetUseHashTable(), ISLocalToGlobalMappingSetUseHashTable()
850 @*/
851 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
852                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
853 {
854   PetscInt       i,*globals,nf = 0,tmp,start,end;
855   PetscBool      use_hash_table;
856   PetscErrorCode ierr;
857 
858   PetscFunctionBegin;
859   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
860   ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
861   ierr = ISLocalToGlobalMappingGetUseHashTable(mapping,&use_hash_table);CHKERRQ(ierr);
862   globals = mapping->globals;
863   start   = mapping->globalstart;
864   end     = mapping->globalend;
865 
866 #define GTOL(g, local) do {                     \
867     if (use_hash_table) {                       \
868       PetscHashIMap(mapping->globalht,g,local); \
869     } else {                                    \
870       local = globals[g - start];               \
871     }                                           \
872   } while (0)
873 
874   if (type == IS_GTOLM_MASK) {
875     if (idxout) {
876       for (i=0; i<n; i++) {
877         if (idx[i] < 0) idxout[i] = idx[i];
878         else if (idx[i] < start) idxout[i] = -1;
879         else if (idx[i] > end)   idxout[i] = -1;
880         else                     GTOL(idx[i], idxout[i]);
881       }
882     }
883     if (nout) *nout = n;
884   } else {
885     if (idxout) {
886       for (i=0; i<n; i++) {
887         if (idx[i] < 0) continue;
888         if (idx[i] < start) continue;
889         if (idx[i] > end) continue;
890         GTOL(idx[i], tmp);
891         if (tmp < 0) continue;
892         idxout[nf++] = tmp;
893       }
894     } else {
895       for (i=0; i<n; i++) {
896         if (idx[i] < 0) continue;
897         if (idx[i] < start) continue;
898         if (idx[i] > end) continue;
899         GTOL(idx[i], tmp);
900         if (tmp < 0) continue;
901         nf++;
902       }
903     }
904     if (nout) *nout = nf;
905   }
906 #undef GTOL
907   PetscFunctionReturn(0);
908 }
909 
910 /*@C
911     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
912      each index shared by more than one processor
913 
914     Collective on ISLocalToGlobalMapping
915 
916     Input Parameters:
917 .   mapping - the mapping from local to global indexing
918 
919     Output Parameter:
920 +   nproc - number of processors that are connected to this one
921 .   proc - neighboring processors
922 .   numproc - number of indices for each subdomain (processor)
923 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
924 
925     Level: advanced
926 
927     Concepts: mapping^local to global
928 
929     Fortran Usage:
930 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
931 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
932           PetscInt indices[nproc][numprocmax],ierr)
933         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
934         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
935 
936 
937 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
938           ISLocalToGlobalMappingRestoreInfo()
939 @*/
940 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
941 {
942   PetscErrorCode ierr;
943 
944   PetscFunctionBegin;
945   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
946   if (mapping->info_cached) {
947     *nproc = mapping->info_nproc;
948     *procs = mapping->info_procs;
949     *numprocs = mapping->info_numprocs;
950     *indices = mapping->info_indices;
951   } else {
952     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
953   }
954   PetscFunctionReturn(0);
955 }
956 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - computes which other ranks share each of
   this rank's local (block) indices and caches the result on the mapping.

   The communication is a three-round rendezvous (new tags tag1/tag2/tag3):
     1. (tag1) each rank sends every (global index, local position) pair it holds to the
        rank that "globally owns" that index under a uniform partition of [0,Ng);
     2. (tag2) the owners send back the length of the reply each sender should expect;
     3. (tag3) the owners send back, for each index shared by more than one rank, the
        local position, the sharer count, and the list of sharing ranks.
   The received lists are then bucketed per neighboring rank, sorted by global number,
   and this rank's own entry is swapped into slot 0 of the output arrays.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; return empty (but allocated) arrays and cache them */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + (global maximum index over all ranks) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* uniform block partition of the global index range: this rank globally owns
     [rstart, rstart+ng); the last rank takes the (possibly short) remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows; each incoming message is a list of
     (global index, sender-local position) pairs, hence the factor 2 */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild it before using it for the sends */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* message held pairs; len is the pair count */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* listing above advanced starts[]; rebuild it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* offsets into recvs2 for each expected reply; nt = total reply length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count the number of subdomains for each local node (nprocs[] reused as a
     per-rank counter, indexed by rank) */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr); /* bprocs[rank] = compressed slot of that rank */
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1366 
1367 /*@C
1368     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1369 
1370     Collective on ISLocalToGlobalMapping
1371 
1372     Input Parameters:
1373 .   mapping - the mapping from local to global indexing
1374 
1375     Output Parameter:
1376 +   nproc - number of processors that are connected to this one
1377 .   proc - neighboring processors
1378 .   numproc - number of indices for each processor
1379 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1380 
1381     Level: advanced
1382 
1383 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1384           ISLocalToGlobalMappingGetInfo()
1385 @*/
1386 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1387 {
1388   PetscErrorCode ierr;
1389 
1390   PetscFunctionBegin;
1391   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1392   if (mapping->info_free) {
1393     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1394     if (*indices) {
1395       PetscInt i;
1396 
1397       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1398       for (i=1; i<*nproc; i++) {
1399         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1400       }
1401       ierr = PetscFree(*indices);CHKERRQ(ierr);
1402     }
1403   }
1404   *nproc = 0;
1405   *procs = NULL;
1406   *numprocs = NULL;
1407   *indices = NULL;
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 /*@C
1412     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1413      each index shared by more than one processor
1414 
1415     Collective on ISLocalToGlobalMapping
1416 
1417     Input Parameters:
1418 .   mapping - the mapping from local to global indexing
1419 
1420     Output Parameter:
1421 +   nproc - number of processors that are connected to this one
1422 .   proc - neighboring processors
1423 .   numproc - number of indices for each subdomain (processor)
1424 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1425 
1426     Level: advanced
1427 
1428     Concepts: mapping^local to global
1429 
1430     Fortran Usage:
1431 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1432 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1433           PetscInt indices[nproc][numprocmax],ierr)
1434         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1435         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1436 
1437 
1438 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1439           ISLocalToGlobalMappingRestoreInfo()
1440 @*/
1441 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1442 {
1443   PetscErrorCode ierr;
1444   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1445 
1446   PetscFunctionBegin;
1447   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1448   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1449   if (bs > 1) { /* we need to expand the cached info */
1450     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1451     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1452     for (i=0; i<*nproc; i++) {
1453       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1454       for (j=0; j<bnumprocs[i]; j++) {
1455         for (k=0; k<bs; k++) {
1456           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1457         }
1458       }
1459       (*numprocs)[i] = bnumprocs[i]*bs;
1460     }
1461     mapping->info_free = PETSC_TRUE;
1462   } else {
1463     *numprocs = bnumprocs;
1464     *indices  = bindices;
1465   }
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 /*@C
1470     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1471 
1472     Collective on ISLocalToGlobalMapping
1473 
1474     Input Parameters:
1475 .   mapping - the mapping from local to global indexing
1476 
1477     Output Parameter:
1478 +   nproc - number of processors that are connected to this one
1479 .   proc - neighboring processors
1480 .   numproc - number of indices for each processor
1481 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1482 
1483     Level: advanced
1484 
1485 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1486           ISLocalToGlobalMappingGetInfo()
1487 @*/
1488 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1489 {
1490   PetscErrorCode ierr;
1491 
1492   PetscFunctionBegin;
1493   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1494   PetscFunctionReturn(0);
1495 }
1496 
1497 /*@C
1498    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1499 
1500    Not Collective
1501 
1502    Input Arguments:
1503 . ltog - local to global mapping
1504 
1505    Output Arguments:
1506 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1507 
1508    Level: advanced
1509 
1510    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1511 
1512 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1513 @*/
1514 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1515 {
1516   PetscFunctionBegin;
1517   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1518   PetscValidPointer(array,2);
1519   if (ltog->bs == 1) {
1520     *array = ltog->indices;
1521   } else {
1522     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1523     const PetscInt *ii;
1524     PetscErrorCode ierr;
1525 
1526     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1527     *array = jj;
1528     k    = 0;
1529     ii   = ltog->indices;
1530     for (i=0; i<n; i++)
1531       for (j=0; j<bs; j++)
1532         jj[k++] = bs*ii[i] + j;
1533   }
1534   PetscFunctionReturn(0);
1535 }
1536 
1537 /*@C
1538    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1539 
1540    Not Collective
1541 
1542    Input Arguments:
1543 + ltog - local to global mapping
1544 - array - array of indices
1545 
1546    Level: advanced
1547 
1548 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1549 @*/
1550 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1551 {
1552   PetscFunctionBegin;
1553   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1554   PetscValidPointer(array,2);
1555   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1556 
1557   if (ltog->bs > 1) {
1558     PetscErrorCode ierr;
1559     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1560   }
1561   PetscFunctionReturn(0);
1562 }
1563 
1564 /*@C
1565    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1566 
1567    Not Collective
1568 
1569    Input Arguments:
1570 . ltog - local to global mapping
1571 
1572    Output Arguments:
1573 . array - array of indices
1574 
1575    Level: advanced
1576 
1577 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1578 @*/
1579 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1580 {
1581   PetscFunctionBegin;
1582   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1583   PetscValidPointer(array,2);
1584   *array = ltog->indices;
1585   PetscFunctionReturn(0);
1586 }
1587 
1588 /*@C
1589    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1590 
1591    Not Collective
1592 
1593    Input Arguments:
1594 + ltog - local to global mapping
1595 - array - array of indices
1596 
1597    Level: advanced
1598 
1599 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1600 @*/
1601 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1602 {
1603   PetscFunctionBegin;
1604   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1605   PetscValidPointer(array,2);
1606   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1607   *array = NULL;
1608   PetscFunctionReturn(0);
1609 }
1610 
1611 /*@C
1612    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1613 
1614    Not Collective
1615 
1616    Input Arguments:
1617 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1618 . n - number of mappings to concatenate
1619 - ltogs - local to global mappings
1620 
1621    Output Arguments:
1622 . ltogcat - new mapping
1623 
1624    Note: this currently always returns a mapping with block size of 1
1625 
1626    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1627 
1628    Level: advanced
1629 
1630 .seealso: ISLocalToGlobalMappingCreate()
1631 @*/
1632 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1633 {
1634   PetscInt       i,cnt,m,*idx;
1635   PetscErrorCode ierr;
1636 
1637   PetscFunctionBegin;
1638   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1639   if (n > 0) PetscValidPointer(ltogs,3);
1640   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1641   PetscValidPointer(ltogcat,4);
1642   for (cnt=0,i=0; i<n; i++) {
1643     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1644     cnt += m;
1645   }
1646   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1647   for (cnt=0,i=0; i<n; i++) {
1648     const PetscInt *subidx;
1649     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1650     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1651     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1652     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1653     cnt += m;
1654   }
1655   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1656   PetscFunctionReturn(0);
1657 }
1658 
1659 
1660