xref: /petsc/src/vec/is/utils/isltog.c (revision 9895aa37ac365bac650f6bd8bf977519f7222510)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscviewer.h>
4 
5 PetscClassId IS_LTOGM_CLASSID;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "ISL2GMapApply"
9 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
10 {
11   PetscErrorCode ierr;
12   PetscInt       i,*globals = mapping->globals,start = mapping->globalstart,end = mapping->globalend;
13 
14   PetscFunctionBegin;
15   if (!mapping->globals) {
16     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
17   }
18   for (i=0; i<n; i++) {
19     if (in[i] < 0)          out[i] = in[i];
20     else if (in[i] < start) out[i] = -1;
21     else if (in[i] > end)   out[i] = -1;
22     else                    out[i] = globals[in[i] - start];
23   }
24   PetscFunctionReturn(0);
25 }
26 
27 
28 #undef __FUNCT__
29 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
30 /*@C
31     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
32 
33     Not Collective
34 
35     Input Parameter:
36 .   ltog - local to global mapping
37 
38     Output Parameter:
39 .   n - the number of entries in the local mapping
40 
41     Level: advanced
42 
43     Concepts: mapping^local to global
44 
45 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
46 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* return the number of local entries stored in the mapping */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "ISLocalToGlobalMappingView"
58 /*@C
59     ISLocalToGlobalMappingView - View a local to global mapping
60 
61     Not Collective
62 
63     Input Parameters:
64 +   ltog - local to global mapping
65 -   viewer - viewer
66 
67     Level: advanced
68 
69     Concepts: mapping^local to global
70 
71 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
72 @*/
73 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
74 {
75   PetscInt       i;
76   PetscMPIInt    rank;
77   PetscBool      iascii;
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
82   if (!viewer) {
83     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
84   }
85   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
86 
87   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
88   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
89   if (iascii) {
90     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
91     for (i=0; i<mapping->n; i++) {
92       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
93     }
94     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
96   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
97   PetscFunctionReturn(0);
98 }
99 
100 #undef __FUNCT__
101 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
102 /*@
103     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
104     ordering and a global parallel ordering.
105 
106     Not collective
107 
108     Input Parameter:
109 .   is - index set containing the global numbers for each local number
110 
111     Output Parameter:
112 .   mapping - new mapping data structure
113 
114     Level: advanced
115 
116     Concepts: mapping^local to global
117 
118 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
119 @*/
120 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
121 {
122   PetscErrorCode ierr;
123   PetscInt       n;
124   const PetscInt *indices;
125   MPI_Comm       comm;
126 
127   PetscFunctionBegin;
128   PetscValidHeaderSpecific(is,IS_CLASSID,1);
129   PetscValidPointer(mapping,2);
130 
131   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
132   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
133   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
134   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
135   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
136   PetscFunctionReturn(0);
137 }
138 
139 #undef __FUNCT__
140 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
141 /*@C
142     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
143     ordering and a global parallel ordering.
144 
145     Collective
146 
147     Input Parameters:
148 +   sf - star forest mapping contiguous local indices to (rank, offset)
149 -   start - first global index on this process
150 
151     Output Parameter:
152 .   mapping - new mapping data structure
153 
154     Level: advanced
155 
156     Concepts: mapping^local to global
157 
158 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
159 @*/
160 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
161 {
162   PetscErrorCode ierr;
163   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
164   const PetscInt *ilocal;
165   MPI_Comm       comm;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
169   PetscValidPointer(mapping,3);
170 
171   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
172   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
173   if (ilocal) {
174     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
175   }
176   else maxlocal = nleaves;
177   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
178   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
179   for (i=0; i<nroots; i++) globals[i] = start + i;
180   for (i=0; i<maxlocal; i++) ltog[i] = -1;
181   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
182   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
183   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
184   ierr = PetscFree(globals);CHKERRQ(ierr);
185   PetscFunctionReturn(0);
186 }
187 
188 #undef __FUNCT__
189 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
190 /*@
191     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
192     ordering and a global parallel ordering.
193 
194     Not Collective, but communicator may have more than one process
195 
196     Input Parameters:
197 +   comm - MPI communicator
198 .   n - the number of local elements
199 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
200 -   mode - see PetscCopyMode
201 
202     Output Parameter:
203 .   mapping - new mapping data structure
204 
205     Level: advanced
206 
207     Concepts: mapping^local to global
208 
209 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
210 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may legitimately be NULL when the mapping is empty (n == 0) */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = ISInitializePackage(NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's index array */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
  /* PETSC_USE_POINTER (borrowing without ownership) is not implemented here */
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
242 
243 #undef __FUNCT__
244 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
245 /*@
246     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
247        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
248        and VecSetLocalToGlobalMappingBlock().
249 
250     Not Collective, but communicator may have more than one process
251 
252     Input Parameters:
253 +    inmap - original point-wise mapping
254 -    bs - block size
255 
256     Output Parameter:
257 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
258 
259     Level: advanced
260 
261     Concepts: mapping^local to global
262 
263 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
264 @*/
265 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
266 {
267   PetscErrorCode ierr;
268   PetscInt       *ii,i,n;
269 
270   PetscFunctionBegin;
271   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
272   PetscValidPointer(outmap,3);
273   if (bs > 1) {
274     n = inmap->n/bs;
275     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
276     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
277     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
278     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
279   } else {
280     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
281     *outmap = inmap;
282   }
283   PetscFunctionReturn(0);
284 }
285 
286 #undef __FUNCT__
287 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
288 /*@
289     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
290        ISLocalToGlobalMapping
291 
292     Not Collective, but communicator may have more than one process
293 
294     Input Parameter:
295 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
296 - bs - block size
297 
298     Output Parameter:
299 .   outmap - pointwise mapping
300 
301     Level: advanced
302 
303     Concepts: mapping^local to global
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
306 @*/
307 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
308 {
309   PetscErrorCode ierr;
310   PetscInt       *ii,i,n;
311 
312   PetscFunctionBegin;
313   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
314   PetscValidPointer(outmap,2);
315   if (bs > 1) {
316     n    = inmap->n*bs;
317     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
318     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
319     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
320   } else {
321     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
322     *outmap = inmap;
323   }
324   PetscFunctionReturn(0);
325 }
326 
327 #undef __FUNCT__
328 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
329 /*@
330    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
331    ordering and a global parallel ordering.
332 
333    Not Collective
334 
335    Input Parameters:
336 .  mapping - mapping data structure
337 
338    Level: advanced
339 
340 .seealso: ISLocalToGlobalMappingCreate()
341 @*/
342 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
343 {
344   PetscErrorCode ierr;
345 
346   PetscFunctionBegin;
347   if (!*mapping) PetscFunctionReturn(0);
348   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
349   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
350   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
351   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
352   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
353   *mapping = 0;
354   PetscFunctionReturn(0);
355 }
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
359 /*@
360     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
361     a new index set using the global numbering defined in an ISLocalToGlobalMapping
362     context.
363 
364     Not collective
365 
366     Input Parameters:
367 +   mapping - mapping between local and global numbering
368 -   is - index set in local numbering
369 
370     Output Parameters:
371 .   newis - index set in global numbering
372 
373     Level: advanced
374 
375     Concepts: mapping^local to global
376 
377 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
378           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
379 @*/
380 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
381 {
382   PetscErrorCode ierr;
383   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
384   const PetscInt *idxin;
385 
386   PetscFunctionBegin;
387   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
388   PetscValidHeaderSpecific(is,IS_CLASSID,2);
389   PetscValidPointer(newis,3);
390 
391   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
392   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
393   idxmap = mapping->indices;
394 
395   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
396   for (i=0; i<n; i++) {
397     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
398     idxout[i] = idxmap[idxin[i]];
399   }
400   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
401   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "ISLocalToGlobalMappingApply"
407 /*@
408    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
409    and converts them to the global numbering.
410 
411    Not collective
412 
413    Input Parameters:
414 +  mapping - the local to global mapping context
415 .  N - number of integers
416 -  in - input indices in local numbering
417 
418    Output Parameter:
419 .  out - indices in global numbering
420 
421    Notes:
422    The in and out array parameters may be identical.
423 
424    Level: advanced
425 
426 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
427           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
428           AOPetscToApplication(), ISGlobalToLocalMappingApply()
429 
430     Concepts: mapping^local to global
431 @*/
432 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
433 {
434   PetscInt       i,Nmax = mapping->n;
435   const PetscInt *idx = mapping->indices;
436 
437   PetscFunctionBegin;
438   for (i=0; i<N; i++) {
439     if (in[i] < 0) {
440       out[i] = in[i];
441       continue;
442     }
443     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
444     out[i] = idx[in[i]];
445   }
446   PetscFunctionReturn(0);
447 }
448 
449 /* -----------------------------------------------------------------------------------------*/
450 
451 #undef __FUNCT__
452 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
453 /*
454     Creates the global fields in the ISLocalToGlobalMapping structure
455 */
456 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
457 {
458   PetscErrorCode ierr;
459   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
460 
461   PetscFunctionBegin;
462   end   = 0;
463   start = PETSC_MAX_INT;
464 
465   for (i=0; i<n; i++) {
466     if (idx[i] < 0) continue;
467     if (idx[i] < start) start = idx[i];
468     if (idx[i] > end)   end   = idx[i];
469   }
470   if (start > end) {start = 0; end = -1;}
471   mapping->globalstart = start;
472   mapping->globalend   = end;
473 
474   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
475   mapping->globals = globals;
476   for (i=0; i<end-start+1; i++) globals[i] = -1;
477   for (i=0; i<n; i++) {
478     if (idx[i] < 0) continue;
479     globals[idx[i] - start] = i;
480   }
481 
482   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
483   PetscFunctionReturn(0);
484 }
485 
486 #undef __FUNCT__
487 #define __FUNCT__ "ISGlobalToLocalMappingApply"
488 /*@
489     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
490     specified with a global numbering.
491 
492     Not collective
493 
494     Input Parameters:
495 +   mapping - mapping between local and global numbering
496 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
497            IS_GTOLM_DROP - drops the indices with no local value from the output list
498 .   n - number of global indices to map
499 -   idx - global indices to map
500 
501     Output Parameters:
502 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
503 -   idxout - local index of each global index, one must pass in an array long enough
504              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
505              idxout == NULL to determine the required length (returned in nout)
506              and then allocate the required space and call ISGlobalToLocalMappingApply()
507              a second time to set the values.
508 
509     Notes:
510     Either nout or idxout may be NULL. idx and idxout may be identical.
511 
512     This is not scalable in memory usage. Each processor requires O(Nglobal) size
513     array to compute these.
514 
515     Level: advanced
516 
517     Developer Note: The manual page states that idx and idxout may be identical but the calling
518        sequence declares idx as const so it cannot be the same as idxout.
519 
520     Concepts: mapping^global to local
521 
522 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
523           ISLocalToGlobalMappingDestroy()
524 @*/
525 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
526                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
527 {
528   PetscInt       i,*globals,nf = 0,tmp,start,end;
529   PetscErrorCode ierr;
530 
531   PetscFunctionBegin;
532   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
533   if (!mapping->globals) {
534     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
535   }
536   globals = mapping->globals;
537   start   = mapping->globalstart;
538   end     = mapping->globalend;
539 
540   if (type == IS_GTOLM_MASK) {
541     if (idxout) {
542       for (i=0; i<n; i++) {
543         if (idx[i] < 0) idxout[i] = idx[i];
544         else if (idx[i] < start) idxout[i] = -1;
545         else if (idx[i] > end)   idxout[i] = -1;
546         else                     idxout[i] = globals[idx[i] - start];
547       }
548     }
549     if (nout) *nout = n;
550   } else {
551     if (idxout) {
552       for (i=0; i<n; i++) {
553         if (idx[i] < 0) continue;
554         if (idx[i] < start) continue;
555         if (idx[i] > end) continue;
556         tmp = globals[idx[i] - start];
557         if (tmp < 0) continue;
558         idxout[nf++] = tmp;
559       }
560     } else {
561       for (i=0; i<n; i++) {
562         if (idx[i] < 0) continue;
563         if (idx[i] < start) continue;
564         if (idx[i] > end) continue;
565         tmp = globals[idx[i] - start];
566         if (tmp < 0) continue;
567         nf++;
568       }
569     }
570     if (nout) *nout = nf;
571   }
572   PetscFunctionReturn(0);
573 }
574 
575 #undef __FUNCT__
576 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
577 /*@C
578     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
579      each index shared by more than one processor
580 
581     Collective on ISLocalToGlobalMapping
582 
583     Input Parameters:
584 .   mapping - the mapping from local to global indexing
585 
586     Output Parameter:
587 +   nproc - number of processors that are connected to this one
588 .   proc - neighboring processors
589 .   numproc - number of indices for each subdomain (processor)
590 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
591 
592     Level: advanced
593 
594     Concepts: mapping^local to global
595 
596     Fortran Usage:
597 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
598 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
599           PetscInt indices[nproc][numprocmax],ierr)
600         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
601         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
602 
603 
604 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
605           ISLocalToGlobalMappingRestoreInfo()
606 @*/
607 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
608 {
609   PetscErrorCode ierr;
610   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
611   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
612   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
613   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
614   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
615   PetscInt       first_procs,first_numprocs,*first_indices;
616   MPI_Request    *recv_waits,*send_waits;
617   MPI_Status     recv_status,*send_status,*recv_statuses;
618   MPI_Comm       comm;
619   PetscBool      debug = PETSC_FALSE;
620 
621   PetscFunctionBegin;
622   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
623   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
624   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
625   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
626   if (size == 1) {
627     *nproc         = 0;
628     *procs         = NULL;
629     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
630     (*numprocs)[0] = 0;
631     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
632     (*indices)[0]  = NULL;
633     PetscFunctionReturn(0);
634   }
635 
636   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
637 
638   /*
639     Notes on ISLocalToGlobalMappingGetInfo
640 
641     globally owned node - the nodes that have been assigned to this processor in global
642            numbering, just for this routine.
643 
644     nontrivial globally owned node - node assigned to this processor that is on a subdomain
645            boundary (i.e. is has more than one local owner)
646 
647     locally owned node - node that exists on this processors subdomain
648 
649     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
650            local subdomain
651   */
652   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
653   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
654   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
655 
656   for (i=0; i<n; i++) {
657     if (lindices[i] > max) max = lindices[i];
658   }
659   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
660   Ng++;
661   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
662   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
663   scale  = Ng/size + 1;
664   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
665   rstart = scale*rank;
666 
667   /* determine ownership ranges of global indices */
668   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
669   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
670 
671   /* determine owners of each local node  */
672   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
673   for (i=0; i<n; i++) {
674     proc             = lindices[i]/scale; /* processor that globally owns this index */
675     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
676     owner[i]         = proc;
677     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
678   }
679   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
680   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
681 
682   /* inform other processors of number of messages and max length*/
683   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
684   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
685 
686   /* post receives for owned rows */
687   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
688   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
689   for (i=0; i<nrecvs; i++) {
690     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
691   }
692 
693   /* pack messages containing lists of local nodes to owners */
694   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
695   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
696   starts[0] = 0;
697   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
698   for (i=0; i<n; i++) {
699     sends[starts[owner[i]]++] = lindices[i];
700     sends[starts[owner[i]]++] = i;
701   }
702   ierr = PetscFree(owner);CHKERRQ(ierr);
703   starts[0] = 0;
704   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
705 
706   /* send the messages */
707   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
708   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
709   cnt = 0;
710   for (i=0; i<size; i++) {
711     if (nprocs[2*i]) {
712       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
713       dest[cnt] = i;
714       cnt++;
715     }
716   }
717   ierr = PetscFree(starts);CHKERRQ(ierr);
718 
719   /* wait on receives */
720   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
721   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
722   cnt  = nrecvs;
723   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
724   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
725   while (cnt) {
726     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
727     /* unpack receives into our local space */
728     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
729     source[imdex] = recv_status.MPI_SOURCE;
730     len[imdex]    = len[imdex]/2;
731     /* count how many local owners for each of my global owned indices */
732     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
733     cnt--;
734   }
735   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
736 
737   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
738   nowned  = 0;
739   nownedm = 0;
740   for (i=0; i<ng; i++) {
741     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
742   }
743 
744   /* create single array to contain rank of all local owners of each globally owned index */
745   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
746   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
747   starts[0] = 0;
748   for (i=1; i<ng; i++) {
749     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
750     else starts[i] = starts[i-1];
751   }
752 
753   /* for each nontrival globally owned node list all arriving processors */
754   for (i=0; i<nrecvs; i++) {
755     for (j=0; j<len[i]; j++) {
756       node = recvs[2*i*nmax+2*j]-rstart;
757       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
758     }
759   }
760 
761   if (debug) { /* -----------------------------------  */
762     starts[0] = 0;
763     for (i=1; i<ng; i++) {
764       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
765       else starts[i] = starts[i-1];
766     }
767     for (i=0; i<ng; i++) {
768       if (nownedsenders[i] > 1) {
769         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
770         for (j=0; j<nownedsenders[i]; j++) {
771           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
772         }
773         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
774       }
775     }
776     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
777   } /* -----------------------------------  */
778 
779   /* wait on original sends */
780   if (nsends) {
781     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
782     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
783     ierr = PetscFree(send_status);CHKERRQ(ierr);
784   }
785   ierr = PetscFree(send_waits);CHKERRQ(ierr);
786   ierr = PetscFree(sends);CHKERRQ(ierr);
787   ierr = PetscFree(nprocs);CHKERRQ(ierr);
788 
789   /* pack messages to send back to local owners */
790   starts[0] = 0;
791   for (i=1; i<ng; i++) {
792     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
793     else starts[i] = starts[i-1];
794   }
795   nsends2 = nrecvs;
796   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
797   for (i=0; i<nrecvs; i++) {
798     nprocs[i] = 1;
799     for (j=0; j<len[i]; j++) {
800       node = recvs[2*i*nmax+2*j]-rstart;
801       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
802     }
803   }
804   nt = 0;
805   for (i=0; i<nsends2; i++) nt += nprocs[i];
806 
807   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
808   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
809 
810   starts2[0] = 0;
811   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
812   /*
813      Each message is 1 + nprocs[i] long, and consists of
814        (0) the number of nodes being sent back
815        (1) the local node number,
816        (2) the number of processors sharing it,
817        (3) the processors sharing it
818   */
819   for (i=0; i<nsends2; i++) {
820     cnt = 1;
821     sends2[starts2[i]] = 0;
822     for (j=0; j<len[i]; j++) {
823       node = recvs[2*i*nmax+2*j]-rstart;
824       if (nownedsenders[node] > 1) {
825         sends2[starts2[i]]++;
826         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
827         sends2[starts2[i]+cnt++] = nownedsenders[node];
828         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
829         cnt += nownedsenders[node];
830       }
831     }
832   }
833 
834   /* receive the message lengths */
835   nrecvs2 = nsends;
836   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
837   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
838   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
839   for (i=0; i<nrecvs2; i++) {
840     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
841   }
842 
843   /* send the message lengths */
844   for (i=0; i<nsends2; i++) {
845     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
846   }
847 
848   /* wait on receives of lens */
849   if (nrecvs2) {
850     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
851     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
852     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
853   }
854   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
855 
856   starts3[0] = 0;
857   nt         = 0;
858   for (i=0; i<nrecvs2-1; i++) {
859     starts3[i+1] = starts3[i] + lens2[i];
860     nt          += lens2[i];
861   }
862   if (nrecvs2) nt += lens2[nrecvs2-1];
863 
864   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
865   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
866   for (i=0; i<nrecvs2; i++) {
867     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
868   }
869 
870   /* send the messages */
871   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
872   for (i=0; i<nsends2; i++) {
873     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
874   }
875 
876   /* wait on receives */
877   if (nrecvs2) {
878     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
879     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
880     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
881   }
882   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
883   ierr = PetscFree(nprocs);CHKERRQ(ierr);
884 
885   if (debug) { /* -----------------------------------  */
886     cnt = 0;
887     for (i=0; i<nrecvs2; i++) {
888       nt = recvs2[cnt++];
889       for (j=0; j<nt; j++) {
890         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
891         for (k=0; k<recvs2[cnt+1]; k++) {
892           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
893         }
894         cnt += 2 + recvs2[cnt+1];
895         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
896       }
897     }
898     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
899   } /* -----------------------------------  */
900 
901   /* count number subdomains for each local node */
902   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
903   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
904   cnt  = 0;
905   for (i=0; i<nrecvs2; i++) {
906     nt = recvs2[cnt++];
907     for (j=0; j<nt; j++) {
908       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
909       cnt += 2 + recvs2[cnt+1];
910     }
911   }
912   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
913   *nproc    = nt;
914   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
915   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
916   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
917   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
918   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
919   cnt       = 0;
920   for (i=0; i<size; i++) {
921     if (nprocs[i] > 0) {
922       bprocs[i]        = cnt;
923       (*procs)[cnt]    = i;
924       (*numprocs)[cnt] = nprocs[i];
925       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
926       cnt++;
927     }
928   }
929 
930   /* make the list of subdomains for each nontrivial local node */
931   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
932   cnt  = 0;
933   for (i=0; i<nrecvs2; i++) {
934     nt = recvs2[cnt++];
935     for (j=0; j<nt; j++) {
936       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
937       cnt += 2 + recvs2[cnt+1];
938     }
939   }
940   ierr = PetscFree(bprocs);CHKERRQ(ierr);
941   ierr = PetscFree(recvs2);CHKERRQ(ierr);
942 
943   /* sort the node indexing by their global numbers */
944   nt = *nproc;
945   for (i=0; i<nt; i++) {
946     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
947     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
948     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
949     ierr = PetscFree(tmp);CHKERRQ(ierr);
950   }
951 
952   if (debug) { /* -----------------------------------  */
953     nt = *nproc;
954     for (i=0; i<nt; i++) {
955       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
956       for (j=0; j<(*numprocs)[i]; j++) {
957         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
958       }
959       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
960     }
961     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
962   } /* -----------------------------------  */
963 
964   /* wait on sends */
965   if (nsends2) {
966     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
967     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
968     ierr = PetscFree(send_status);CHKERRQ(ierr);
969   }
970 
971   ierr = PetscFree(starts3);CHKERRQ(ierr);
972   ierr = PetscFree(dest);CHKERRQ(ierr);
973   ierr = PetscFree(send_waits);CHKERRQ(ierr);
974 
975   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
976   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
977   ierr = PetscFree(starts);CHKERRQ(ierr);
978   ierr = PetscFree(starts2);CHKERRQ(ierr);
979   ierr = PetscFree(lens2);CHKERRQ(ierr);
980 
981   ierr = PetscFree(source);CHKERRQ(ierr);
982   ierr = PetscFree(len);CHKERRQ(ierr);
983   ierr = PetscFree(recvs);CHKERRQ(ierr);
984   ierr = PetscFree(nprocs);CHKERRQ(ierr);
985   ierr = PetscFree(sends2);CHKERRQ(ierr);
986 
987   /* put the information about myself as the first entry in the list */
988   first_procs    = (*procs)[0];
989   first_numprocs = (*numprocs)[0];
990   first_indices  = (*indices)[0];
991   for (i=0; i<*nproc; i++) {
992     if ((*procs)[i] == rank) {
993       (*procs)[0]    = (*procs)[i];
994       (*numprocs)[0] = (*numprocs)[i];
995       (*indices)[0]  = (*indices)[i];
996       (*procs)[i]    = first_procs;
997       (*numprocs)[i] = first_numprocs;
998       (*indices)[i]  = first_indices;
999       break;
1000     }
1001   }
1002   PetscFunctionReturn(0);
1003 }
1004 
1005 #undef __FUNCT__
1006 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1007 /*@C
1008     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1009 
1010     Collective on ISLocalToGlobalMapping
1011 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1020 
1021     Level: advanced
1022 
1023 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1024           ISLocalToGlobalMappingGetInfo()
1025 @*/
1026 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1027 {
1028   PetscErrorCode ierr;
1029   PetscInt       i;
1030 
1031   PetscFunctionBegin;
1032   ierr = PetscFree(*procs);CHKERRQ(ierr);
1033   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1034   if (*indices) {
1035     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1036     for (i=1; i<*nproc; i++) {
1037       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1038     }
1039     ierr = PetscFree(*indices);CHKERRQ(ierr);
1040   }
1041   PetscFunctionReturn(0);
1042 }
1043 
1044 #undef __FUNCT__
1045 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1046 /*@C
1047    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1048 
1049    Not Collective
1050 
1051    Input Arguments:
1052 . ltog - local to global mapping
1053 
1054    Output Arguments:
1055 . array - array of indices
1056 
1057    Level: advanced
1058 
1059 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1060 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Borrowed reference to the mapping's internal index array: the caller must
     not free it, and must return it via ISLocalToGlobalMappingRestoreIndices()
     (which checks that the same pointer comes back). */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1069 
1070 #undef __FUNCT__
1071 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1072 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1074 
1075    Not Collective
1076 
1077    Input Arguments:
1078 + ltog - local to global mapping
1079 - array - array of indices
1080 
1081    Level: advanced
1082 
1083 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1084 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* The pointer handed out by ISLocalToGlobalMappingGetIndices() is borrowed,
     not copied; insist the caller returns exactly that pointer. */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* NULL the caller's pointer so stale use after restore is caught early */
  *array = NULL;
  PetscFunctionReturn(0);
}
1094 
1095 #undef __FUNCT__
1096 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1097 /*@C
1098    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1099 
1100    Not Collective
1101 
1102    Input Arguments:
1103 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1104 . n - number of mappings to concatenate
1105 - ltogs - local to global mappings
1106 
1107    Output Arguments:
1108 . ltogcat - new mapping
1109 
1110    Level: advanced
1111 
1112 .seealso: ISLocalToGlobalMappingCreate()
1113 @*/
1114 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1115 {
1116   PetscInt       i,cnt,m,*idx;
1117   PetscErrorCode ierr;
1118 
1119   PetscFunctionBegin;
1120   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1121   if (n > 0) PetscValidPointer(ltogs,3);
1122   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1123   PetscValidPointer(ltogcat,4);
1124   for (cnt=0,i=0; i<n; i++) {
1125     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1126     cnt += m;
1127   }
1128   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1129   for (cnt=0,i=0; i<n; i++) {
1130     const PetscInt *subidx;
1131     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1132     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1133     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1134     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1135     cnt += m;
1136   }
1137   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1138   PetscFunctionReturn(0);
1139 }
1140