xref: /petsc/src/vec/is/utils/isltog.c (revision 2f6eced2a19e978d64f27de66fbc6c26cc5c7934)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 /*@
10     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
11 
12     Not Collective
13 
14     Input Parameter:
15 .   ltog - local to global mapping
16 
17     Output Parameter:
18 .   nltog - the duplicated local to global mapping
19 
20     Level: advanced
21 
22     Concepts: mapping^local to global
23 
24 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
25 @*/
26 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
27 {
28   PetscErrorCode ierr;
29 
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
32   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
33   PetscFunctionReturn(0);
34 }
35 
36 /*@
37     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
38 
39     Not Collective
40 
41     Input Parameter:
42 .   ltog - local to global mapping
43 
44     Output Parameter:
45 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
46 
47     Level: advanced
48 
49     Concepts: mapping^local to global
50 
51 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
52 @*/
53 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
54 {
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
57   PetscValidIntPointer(n,2);
58   *n = mapping->bs*mapping->n;
59   PetscFunctionReturn(0);
60 }
61 
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
101   }
102   PetscFunctionReturn(0);
103 }
104 
105 /*@
106     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
107     ordering and a global parallel ordering.
108 
109     Not collective
110 
111     Input Parameter:
112 .   is - index set containing the global numbers for each local number
113 
114     Output Parameter:
115 .   mapping - new mapping data structure
116 
117     Notes: the block size of the IS determines the block size of the mapping
118     Level: advanced
119 
120     Concepts: mapping^local to global
121 
122 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
123 @*/
124 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
125 {
126   PetscErrorCode ierr;
127   PetscInt       n,bs;
128   const PetscInt *indices;
129   MPI_Comm       comm;
130   PetscBool      isblock;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecific(is,IS_CLASSID,1);
134   PetscValidPointer(mapping,2);
135 
136   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
137   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
138   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
139   if (!isblock) {
140     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
141     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
142     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
143   } else {
144     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
145     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
146     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
147     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
148   }
149   PetscFunctionReturn(0);
150 }
151 
152 /*@C
153     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
154     ordering and a global parallel ordering.
155 
156     Collective
157 
158     Input Parameter:
159 +   sf - star forest mapping contiguous local indices to (rank, offset)
160 -   start - first global index on this process
161 
162     Output Parameter:
163 .   mapping - new mapping data structure
164 
165     Level: advanced
166 
167     Concepts: mapping^local to global
168 
169 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
170 @*/
171 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
172 {
173   PetscErrorCode ierr;
174   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
175   const PetscInt *ilocal;
176   MPI_Comm       comm;
177 
178   PetscFunctionBegin;
179   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
180   PetscValidPointer(mapping,3);
181 
182   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
183   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
184   if (ilocal) {
185     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
186   }
187   else maxlocal = nleaves;
188   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
189   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
190   for (i=0; i<nroots; i++) globals[i] = start + i;
191   for (i=0; i<maxlocal; i++) ltog[i] = -1;
192   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
193   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
194   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
195   ierr = PetscFree(globals);CHKERRQ(ierr);
196   PetscFunctionReturn(0);
197 }
198 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Notes: the existing block indices are rewritten in terms of the new block
    size. The total number of unblocked entries (old bs times old n) must be
    divisible by bs, and within each new block the unblocked indices must be
    consecutive (or all negative, which marks the whole block as invalid).

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;            /* new block indices, one per new block */
  const PetscInt *oid;            /* old indices, expanded to unblocked form */
  PetscInt       i,cn,on,obs,nn;  /* cn: negatives in current block; on/obs: old n/bs; nn: new n */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs;
  /* the total unblocked length must split evenly into blocks of the new size */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* GetIndices returns the unblocked expansion of the stored block indices */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* check entries 0..bs-2 of block i are each one less than their successor,
       counting negative (invalid) entries as we go */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: examine the last entry of the block as well */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* a block is either entirely valid or entirely negative */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      /* the block's first unblocked index, renumbered in units of the new bs */
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  /* install the new block description and drop caches derived from the old one */
  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  PetscFunctionReturn(0);
}
258 
259 /*@
260     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
261     ordering and a global parallel ordering.
262 
263     Not Collective
264 
265     Input Parameters:
266 .   mapping - mapping data structure
267 
268     Output Parameter:
269 .   bs - the blocksize
270 
271     Level: advanced
272 
273     Concepts: mapping^local to global
274 
275 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
276 @*/
277 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
278 {
279   PetscFunctionBegin;
280   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
281   *bs = mapping->bs;
282   PetscFunctionReturn(0);
283 }
284 
285 /*@
286     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
287     ordering and a global parallel ordering.
288 
289     Not Collective, but communicator may have more than one process
290 
291     Input Parameters:
292 +   comm - MPI communicator
293 .   bs - the block size
294 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
295 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
296 -   mode - see PetscCopyMode
297 
298     Output Parameter:
299 .   mapping - new mapping data structure
300 
301     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
302     Level: advanced
303 
304     Concepts: mapping^local to global
305 
306 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
307 @*/
308 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
309 {
310   PetscErrorCode ierr;
311   PetscInt       *in;
312 
313   PetscFunctionBegin;
314   if (n) PetscValidIntPointer(indices,3);
315   PetscValidPointer(mapping,4);
316 
317   *mapping = NULL;
318   ierr = ISInitializePackage();CHKERRQ(ierr);
319 
320   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
321                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
322   (*mapping)->n             = n;
323   (*mapping)->bs            = bs;
324   (*mapping)->info_cached   = PETSC_FALSE;
325   (*mapping)->info_free     = PETSC_FALSE;
326   (*mapping)->info_procs    = NULL;
327   (*mapping)->info_numprocs = NULL;
328   (*mapping)->info_indices  = NULL;
329   /*
330     Do not create the global to local mapping. This is only created if
331     ISGlobalToLocalMapping() is called
332   */
333   (*mapping)->globals = 0;
334   if (mode == PETSC_COPY_VALUES) {
335     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
336     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
337     (*mapping)->indices = in;
338     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
339   } else if (mode == PETSC_OWN_POINTER) {
340     (*mapping)->indices = (PetscInt*)indices;
341     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
342   }
343   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
344   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
345   PetscFunctionReturn(0);
346 }
347 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed unconditionally: it may exist even when info_nproc is 0
       (the serial case allocates a single NULL entry) */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
386 
387 /*@
388     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
389     a new index set using the global numbering defined in an ISLocalToGlobalMapping
390     context.
391 
392     Not collective
393 
394     Input Parameters:
395 +   mapping - mapping between local and global numbering
396 -   is - index set in local numbering
397 
398     Output Parameters:
399 .   newis - index set in global numbering
400 
401     Level: advanced
402 
403     Concepts: mapping^local to global
404 
405 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
406           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
407 @*/
408 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
409 {
410   PetscErrorCode ierr;
411   PetscInt       n,*idxout;
412   const PetscInt *idxin;
413 
414   PetscFunctionBegin;
415   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
416   PetscValidHeaderSpecific(is,IS_CLASSID,2);
417   PetscValidPointer(newis,3);
418 
419   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
420   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
421   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
422   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
423   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
424   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
425   PetscFunctionReturn(0);
426 }
427 
428 /*@
429    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
430    and converts them to the global numbering.
431 
432    Not collective
433 
434    Input Parameters:
435 +  mapping - the local to global mapping context
436 .  N - number of integers
437 -  in - input indices in local numbering
438 
439    Output Parameter:
440 .  out - indices in global numbering
441 
442    Notes:
443    The in and out array parameters may be identical.
444 
445    Level: advanced
446 
447 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
448           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
449           AOPetscToApplication(), ISGlobalToLocalMappingApply()
450 
451     Concepts: mapping^local to global
452 @*/
453 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
454 {
455   PetscInt i,bs,Nmax;
456 
457   PetscFunctionBegin;
458   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
459   bs   = mapping->bs;
460   Nmax = bs*mapping->n;
461   if (bs == 1) {
462     const PetscInt *idx = mapping->indices;
463     for (i=0; i<N; i++) {
464       if (in[i] < 0) {
465         out[i] = in[i];
466         continue;
467       }
468       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
469       out[i] = idx[in[i]];
470     }
471   } else {
472     const PetscInt *idx = mapping->indices;
473     for (i=0; i<N; i++) {
474       if (in[i] < 0) {
475         out[i] = in[i];
476         continue;
477       }
478       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
479       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
480     }
481   }
482   PetscFunctionReturn(0);
483 }
484 
485 /*@
486    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
487 
488    Not collective
489 
490    Input Parameters:
491 +  mapping - the local to global mapping context
492 .  N - number of integers
493 -  in - input indices in local block numbering
494 
495    Output Parameter:
496 .  out - indices in global block numbering
497 
498    Notes:
499    The in and out array parameters may be identical.
500 
501    Example:
502      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
503      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
504 
505    Level: advanced
506 
507 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
508           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
509           AOPetscToApplication(), ISGlobalToLocalMappingApply()
510 
511     Concepts: mapping^local to global
512 @*/
513 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
514 {
515 
516   PetscFunctionBegin;
517   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
518   {
519     PetscInt i,Nmax = mapping->n;
520     const PetscInt *idx = mapping->indices;
521 
522     for (i=0; i<N; i++) {
523       if (in[i] < 0) {
524         out[i] = in[i];
525         continue;
526       }
527       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
528       out[i] = idx[in[i]];
529     }
530   }
531   PetscFunctionReturn(0);
532 }
533 
534 /* -----------------------------------------------------------------------------------------*/
535 
536 /*
537     Creates the global fields in the ISLocalToGlobalMapping structure
538 */
539 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
540 {
541   PetscErrorCode ierr;
542   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
543 
544   PetscFunctionBegin;
545   end   = 0;
546   start = PETSC_MAX_INT;
547 
548   for (i=0; i<n; i++) {
549     if (idx[i] < 0) continue;
550     if (idx[i] < start) start = idx[i];
551     if (idx[i] > end)   end   = idx[i];
552   }
553   if (start > end) {start = 0; end = -1;}
554   mapping->globalstart = start;
555   mapping->globalend   = end;
556 
557   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
558   mapping->globals = globals;
559   for (i=0; i<end-start+1; i++) globals[i] = -1;
560   for (i=0; i<n; i++) {
561     if (idx[i] < 0) continue;
562     globals[idx[i] - start] = i;
563   }
564 
565   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
566   PetscFunctionReturn(0);
567 }
568 
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                            PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global-to-local table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;  /* in block units */
  end     = mapping->globalend;    /* in block units */
  bs      = mapping->bs;

  if (type == IS_GTOLM_MASK) {
    /* one output per input; out-of-range globals become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0)                   idxout[i] = idx[i];
        else if (idx[i] < bs*start)       idxout[i] = -1;
        else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
        /* NOTE(review): if globals[...] is -1 (no local value) and bs > 1, this
           yields bs*(-1)+(idx[i]%bs) rather than -1 — confirm this is intended */
        else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: skip globals with no local value; nf counts the survivors */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* idxout == NULL: counting pass only, used to size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
656 
657 /*@
658     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
659     a new index set using the local numbering defined in an ISLocalToGlobalMapping
660     context.
661 
662     Not collective
663 
664     Input Parameters:
665 +   mapping - mapping between local and global numbering
666 -   is - index set in global numbering
667 
668     Output Parameters:
669 .   newis - index set in local numbering
670 
671     Level: advanced
672 
673     Concepts: mapping^local to global
674 
675 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
676           ISLocalToGlobalMappingDestroy()
677 @*/
678 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
679 {
680   PetscErrorCode ierr;
681   PetscInt       n,nout,*idxout;
682   const PetscInt *idxin;
683 
684   PetscFunctionBegin;
685   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
686   PetscValidHeaderSpecific(is,IS_CLASSID,3);
687   PetscValidPointer(newis,4);
688 
689   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
690   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
691   if (type == IS_GTOLM_MASK) {
692     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
693   } else {
694     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
695     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
696   }
697   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
698   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
699   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
700   PetscFunctionReturn(0);
701 }
702 
703 /*@
704     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
705     specified with a block global numbering.
706 
707     Not collective
708 
709     Input Parameters:
710 +   mapping - mapping between local and global numbering
711 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
712            IS_GTOLM_DROP - drops the indices with no local value from the output list
713 .   n - number of global indices to map
714 -   idx - global indices to map
715 
716     Output Parameters:
717 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
718 -   idxout - local index of each global index, one must pass in an array long enough
719              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
720              idxout == NULL to determine the required length (returned in nout)
721              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
722              a second time to set the values.
723 
724     Notes:
725     Either nout or idxout may be NULL. idx and idxout may be identical.
726 
727     This is not scalable in memory usage. Each processor requires O(Nglobal) size
728     array to compute these.
729 
730     Level: advanced
731 
732     Developer Note: The manual page states that idx and idxout may be identical but the calling
733        sequence declares idx as const so it cannot be the same as idxout.
734 
735     Concepts: mapping^global to local
736 
737 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
738           ISLocalToGlobalMappingDestroy()
739 @*/
740 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
741                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
742 {
743   PetscInt       i,*globals,nf = 0,tmp,start,end;
744   PetscErrorCode ierr;
745 
746   PetscFunctionBegin;
747   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
748   if (!mapping->globals) {
749     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
750   }
751   globals = mapping->globals;
752   start   = mapping->globalstart;
753   end     = mapping->globalend;
754 
755   if (type == IS_GTOLM_MASK) {
756     if (idxout) {
757       for (i=0; i<n; i++) {
758         if (idx[i] < 0) idxout[i] = idx[i];
759         else if (idx[i] < start) idxout[i] = -1;
760         else if (idx[i] > end)   idxout[i] = -1;
761         else                     idxout[i] = globals[idx[i] - start];
762       }
763     }
764     if (nout) *nout = n;
765   } else {
766     if (idxout) {
767       for (i=0; i<n; i++) {
768         if (idx[i] < 0) continue;
769         if (idx[i] < start) continue;
770         if (idx[i] > end) continue;
771         tmp = globals[idx[i] - start];
772         if (tmp < 0) continue;
773         idxout[nf++] = tmp;
774       }
775     } else {
776       for (i=0; i<n; i++) {
777         if (idx[i] < 0) continue;
778         if (idx[i] < start) continue;
779         if (idx[i] > end) continue;
780         tmp = globals[idx[i] - start];
781         if (tmp < 0) continue;
782         nf++;
783       }
784     }
785     if (nout) *nout = nf;
786   }
787   PetscFunctionReturn(0);
788 }
789 
790 /*@C
791     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
792      each index shared by more than one processor
793 
794     Collective on ISLocalToGlobalMapping
795 
796     Input Parameters:
797 .   mapping - the mapping from local to global indexing
798 
799     Output Parameter:
800 +   nproc - number of processors that are connected to this one
801 .   proc - neighboring processors
802 .   numproc - number of indices for each subdomain (processor)
803 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
804 
805     Level: advanced
806 
807     Concepts: mapping^local to global
808 
809     Fortran Usage:
810 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
811 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
812           PetscInt indices[nproc][numprocmax],ierr)
813         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
814         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
815 
816 
817 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
818           ISLocalToGlobalMappingRestoreInfo()
819 @*/
820 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
821 {
822   PetscErrorCode ierr;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
826   if (mapping->info_cached) {
827     *nproc = mapping->info_nproc;
828     *procs = mapping->info_procs;
829     *numprocs = mapping->info_numprocs;
830     *indices = mapping->info_indices;
831   } else {
832     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
833   }
834   PetscFunctionReturn(0);
835 }
836 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - computes, for each neighboring rank,
   the list of local block indices that are shared with that rank (blocks whose global
   number also appears in some other rank's mapping).

   The communication is a rendezvous in three message rounds (tags tag1, tag2, tag3):
     1. every rank sends each (global index, local position) pair it holds to the rank
        that "globally owns" that index under a linear partition of [0, Ng);
     2. the owners count how many ranks supplied each owned index and send the
        resulting reply-message lengths back;
     3. the owners send back, for each nontrivial index (supplied by more than one
        rank), the list of ranks sharing it.

   On return the results are also stored in mapping->info_* so that later calls through
   ISLocalToGlobalMappingGetBlockInfo() are free; the cached arrays are presumably
   released when the mapping is destroyed — confirm against ISLocalToGlobalMappingDestroy().
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor case: nothing is shared, but numprocs/indices still get
       one-element arrays so the Restore routine can free them uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = global bound: one more than the largest global index on any rank */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* linear partition of [0, Ng): this rank "globally owns" the slab [rstart, rstart+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs is interleaved: nprocs[2*p] = count destined for rank p, nprocs[2*p+1] = 0/1 flag "message to p needed" */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* message i lands at fixed stride recvs + 2*nmax*i; each entry is a (global index, local position) pair */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* packing advanced starts[]; rebuild it for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* round 2/3: replies go back to exactly the ranks that sent in round 1 */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* starts3 = displacement of each reply in recvs2; nt = total reply length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* here nprocs is reused with a new meaning: nprocs[p] = number of my local nodes shared with rank p */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  /* bprocs maps a rank number to its slot in the compressed procs/numprocs/indices arrays */
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1246 
1247 /*@C
1248     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1249 
1250     Collective on ISLocalToGlobalMapping
1251 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1260 
1261     Level: advanced
1262 
1263 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1264           ISLocalToGlobalMappingGetInfo()
1265 @*/
1266 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1267 {
1268   PetscErrorCode ierr;
1269 
1270   PetscFunctionBegin;
1271   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1272   if (mapping->info_free) {
1273     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1274     if (*indices) {
1275       PetscInt i;
1276 
1277       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1278       for (i=1; i<*nproc; i++) {
1279         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1280       }
1281       ierr = PetscFree(*indices);CHKERRQ(ierr);
1282     }
1283   }
1284   *nproc = 0;
1285   *procs = NULL;
1286   *numprocs = NULL;
1287   *indices = NULL;
1288   PetscFunctionReturn(0);
1289 }
1290 
1291 /*@C
1292     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1293      each index shared by more than one processor
1294 
1295     Collective on ISLocalToGlobalMapping
1296 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1305 
1306     Level: advanced
1307 
1308     Concepts: mapping^local to global
1309 
1310     Fortran Usage:
1311 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1312 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1313           PetscInt indices[nproc][numprocmax],ierr)
1314         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1315         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1316 
1317 
1318 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1319           ISLocalToGlobalMappingRestoreInfo()
1320 @*/
1321 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1322 {
1323   PetscErrorCode ierr;
1324   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1325 
1326   PetscFunctionBegin;
1327   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1328   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1329   if (bs > 1) { /* we need to expand the cached info */
1330     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1331     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1332     for (i=0; i<*nproc; i++) {
1333       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1334       for (j=0; j<bnumprocs[i]; j++) {
1335         for (k=0; k<bs; k++) {
1336           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1337         }
1338       }
1339       (*numprocs)[i] = bnumprocs[i]*bs;
1340     }
1341     mapping->info_free = PETSC_TRUE;
1342   } else {
1343     *numprocs = bnumprocs;
1344     *indices  = bindices;
1345   }
1346   PetscFunctionReturn(0);
1347 }
1348 
1349 /*@C
1350     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1351 
1352     Collective on ISLocalToGlobalMapping
1353 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1362 
1363     Level: advanced
1364 
1365 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1366           ISLocalToGlobalMappingGetInfo()
1367 @*/
1368 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1369 {
1370   PetscErrorCode ierr;
1371 
1372   PetscFunctionBegin;
1373   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1374   PetscFunctionReturn(0);
1375 }
1376 
1377 /*@C
1378    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1379 
1380    Not Collective
1381 
1382    Input Arguments:
1383 . ltog - local to global mapping
1384 
1385    Output Arguments:
1386 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1387 
1388    Level: advanced
1389 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1391 
1392 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1393 @*/
1394 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1395 {
1396   PetscFunctionBegin;
1397   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1398   PetscValidPointer(array,2);
1399   if (ltog->bs == 1) {
1400     *array = ltog->indices;
1401   } else {
1402     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1403     const PetscInt *ii;
1404     PetscErrorCode ierr;
1405 
1406     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1407     *array = jj;
1408     k    = 0;
1409     ii   = ltog->indices;
1410     for (i=0; i<n; i++)
1411       for (j=0; j<bs; j++)
1412         jj[k++] = bs*ii[i] + j;
1413   }
1414   PetscFunctionReturn(0);
1415 }
1416 
1417 /*@C
1418    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1419 
1420    Not Collective
1421 
1422    Input Arguments:
1423 + ltog - local to global mapping
1424 - array - array of indices
1425 
1426    Level: advanced
1427 
1428 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1429 @*/
1430 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1431 {
1432   PetscFunctionBegin;
1433   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1434   PetscValidPointer(array,2);
1435   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1436 
1437   if (ltog->bs > 1) {
1438     PetscErrorCode ierr;
1439     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1440   }
1441   PetscFunctionReturn(0);
1442 }
1443 
1444 /*@C
1445    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1446 
1447    Not Collective
1448 
1449    Input Arguments:
1450 . ltog - local to global mapping
1451 
1452    Output Arguments:
1453 . array - array of indices
1454 
1455    Level: advanced
1456 
1457 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1458 @*/
1459 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1460 {
1461   PetscFunctionBegin;
1462   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1463   PetscValidPointer(array,2);
1464   *array = ltog->indices;
1465   PetscFunctionReturn(0);
1466 }
1467 
1468 /*@C
1469    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1470 
1471    Not Collective
1472 
1473    Input Arguments:
1474 + ltog - local to global mapping
1475 - array - array of indices
1476 
1477    Level: advanced
1478 
1479 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1480 @*/
1481 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1482 {
1483   PetscFunctionBegin;
1484   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1485   PetscValidPointer(array,2);
1486   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1487   *array = NULL;
1488   PetscFunctionReturn(0);
1489 }
1490 
1491 /*@C
1492    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1493 
1494    Not Collective
1495 
1496    Input Arguments:
1497 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1498 . n - number of mappings to concatenate
1499 - ltogs - local to global mappings
1500 
1501    Output Arguments:
1502 . ltogcat - new mapping
1503 
1504    Note: this currently always returns a mapping with block size of 1
1505 
1506    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1507 
1508    Level: advanced
1509 
1510 .seealso: ISLocalToGlobalMappingCreate()
1511 @*/
1512 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1513 {
1514   PetscInt       i,cnt,m,*idx;
1515   PetscErrorCode ierr;
1516 
1517   PetscFunctionBegin;
1518   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1519   if (n > 0) PetscValidPointer(ltogs,3);
1520   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1521   PetscValidPointer(ltogcat,4);
1522   for (cnt=0,i=0; i<n; i++) {
1523     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1524     cnt += m;
1525   }
1526   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1527   for (cnt=0,i=0; i<n; i++) {
1528     const PetscInt *subidx;
1529     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1530     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1531     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1532     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1533     cnt += m;
1534   }
1535   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1536   PetscFunctionReturn(0);
1537 }
1538 
1539 
1540