xref: /petsc/src/vec/is/utils/isltog.c (revision 186d4ecdc2f8f3d4a2a0951ddba16cdc5cd22fa1)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
4 PetscClassId IS_LTOGM_CLASSID;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "ISL2GMapApply"
8 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
9 {
10   PetscErrorCode ierr;
11   PetscInt       i,*globals = mapping->globals,start = mapping->globalstart,end = mapping->globalend;
12 
13   PetscFunctionBegin;
14   if (!mapping->globals) {
15     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
16   }
17   for (i=0; i<n; i++) {
18     if (in[i] < 0)          out[i] = in[i];
19     else if (in[i] < start) out[i] = -1;
20     else if (in[i] > end)   out[i] = -1;
21     else                    out[i] = globals[in[i] - start];
22   }
23   PetscFunctionReturn(0);
24 }
25 
26 
27 #undef __FUNCT__
28 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
29 /*@C
30     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
31 
32     Not Collective
33 
34     Input Parameter:
35 .   ltog - local to global mapping
36 
37     Output Parameter:
38 .   n - the number of entries in the local mapping
39 
40     Level: advanced
41 
42     Concepts: mapping^local to global
43 
44 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
45 @*/
46 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
47 {
48   PetscFunctionBegin;
49   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
50   PetscValidIntPointer(n,2);
51   *n = mapping->n;
52   PetscFunctionReturn(0);
53 }
54 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Default to the standard ASCII viewer on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* Print one "[rank] local global" line per local index, rank by rank */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
98 
99 #undef __FUNCT__
100 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
101 /*@
102     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
103     ordering and a global parallel ordering.
104 
105     Not collective
106 
107     Input Parameter:
108 .   is - index set containing the global numbers for each local number
109 
110     Output Parameter:
111 .   mapping - new mapping data structure
112 
113     Level: advanced
114 
115     Concepts: mapping^local to global
116 
117 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
118 @*/
119 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
120 {
121   PetscErrorCode ierr;
122   PetscInt       n;
123   const PetscInt *indices;
124   MPI_Comm       comm;
125 
126   PetscFunctionBegin;
127   PetscValidHeaderSpecific(is,IS_CLASSID,1);
128   PetscValidPointer(mapping,2);
129 
130   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
131   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
132   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
133   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
134   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
135   PetscFunctionReturn(0);
136 }
137 
138 #undef __FUNCT__
139 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
140 /*@C
141     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
142     ordering and a global parallel ordering.
143 
144     Collective
145 
146     Input Parameter:
147 +   sf - star forest mapping contiguous local indices to (rank, offset)
148 -   start - first global index on this process
149 
150     Output Parameter:
151 .   mapping - new mapping data structure
152 
153     Level: advanced
154 
155     Concepts: mapping^local to global
156 
157 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
158 @*/
159 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
160 {
161   PetscErrorCode ierr;
162   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
163   const PetscInt *ilocal;
164   MPI_Comm       comm;
165 
166   PetscFunctionBegin;
167   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
168   PetscValidPointer(mapping,3);
169 
170   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
171   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
172   if (ilocal) {
173     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
174   }
175   else maxlocal = nleaves;
176   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
177   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
178   for (i=0; i<nroots; i++) globals[i] = start + i;
179   for (i=0; i<maxlocal; i++) ltog[i] = -1;
180   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
181   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
182   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
183   ierr = PetscFree(globals);CHKERRQ(ierr);
184   PetscFunctionReturn(0);
185 }
186 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element, these do not need to be in increasing order (sorted)
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only for an empty mapping */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = ISInitializePackage(NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's index array */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;  /* mapping now owns (and will free) the array */
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
241 
242 #undef __FUNCT__
243 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
244 /*@
245     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
246        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
247        and VecSetLocalToGlobalMappingBlock().
248 
249     Not Collective, but communicator may have more than one process
250 
251     Input Parameters:
252 +    inmap - original point-wise mapping
253 -    bs - block size
254 
255     Output Parameter:
256 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
257 
258     Level: advanced
259 
260     Concepts: mapping^local to global
261 
262 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
263 @*/
264 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
265 {
266   PetscErrorCode ierr;
267   PetscInt       *ii,i,n;
268 
269   PetscFunctionBegin;
270   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
271   PetscValidPointer(outmap,3);
272   if (bs > 1) {
273     n = inmap->n/bs;
274     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
275     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
276     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
277     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
278   } else {
279     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
280     *outmap = inmap;
281   }
282   PetscFunctionReturn(0);
283 }
284 
285 #undef __FUNCT__
286 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
287 /*@
288     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
289        ISLocalToGlobalMapping
290 
291     Not Collective, but communicator may have more than one process
292 
293     Input Parameter:
294 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
295 - bs - block size
296 
297     Output Parameter:
298 .   outmap - pointwise mapping
299 
300     Level: advanced
301 
302     Concepts: mapping^local to global
303 
304 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
305 @*/
306 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
307 {
308   PetscErrorCode ierr;
309   PetscInt       *ii,i,n;
310 
311   PetscFunctionBegin;
312   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
313   PetscValidPointer(outmap,2);
314   if (bs > 1) {
315     n    = inmap->n*bs;
316     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
317     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
318     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
319   } else {
320     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
321     *outmap = inmap;
322   }
323   PetscFunctionReturn(0);
324 }
325 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Only tear down when the last reference goes away; otherwise just null the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);  /* may be NULL if never set up; PetscFree handles that */
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
355 
356 #undef __FUNCT__
357 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
358 /*@
359     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
360     a new index set using the global numbering defined in an ISLocalToGlobalMapping
361     context.
362 
363     Not collective
364 
365     Input Parameters:
366 +   mapping - mapping between local and global numbering
367 -   is - index set in local numbering
368 
369     Output Parameters:
370 .   newis - index set in global numbering
371 
372     Level: advanced
373 
374     Concepts: mapping^local to global
375 
376 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
377           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
378 @*/
379 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
380 {
381   PetscErrorCode ierr;
382   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
383   const PetscInt *idxin;
384 
385   PetscFunctionBegin;
386   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
387   PetscValidHeaderSpecific(is,IS_CLASSID,2);
388   PetscValidPointer(newis,3);
389 
390   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
391   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
392   idxmap = mapping->indices;
393 
394   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
395   for (i=0; i<n; i++) {
396     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
397     idxout[i] = idxmap[idxin[i]];
398   }
399   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
400   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
401   PetscFunctionReturn(0);
402 }
403 
404 #undef __FUNCT__
405 #define __FUNCT__ "ISLocalToGlobalMappingApply"
406 /*@C
407    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
408    and converts them to the global numbering.
409 
410    Not collective
411 
412    Input Parameters:
413 +  mapping - the local to global mapping context
414 .  N - number of integers
415 -  in - input indices in local numbering
416 
417    Output Parameter:
418 .  out - indices in global numbering
419 
420    Notes:
421    The in and out array parameters may be identical.
422 
423    Level: advanced
424 
425 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
426           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
427           AOPetscToApplication(), ISGlobalToLocalMappingApply()
428 
429     Concepts: mapping^local to global
430 @*/
431 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
432 {
433   PetscInt       i,Nmax = mapping->n;
434   const PetscInt *idx = mapping->indices;
435 
436   PetscFunctionBegin;
437   for (i=0; i<N; i++) {
438     if (in[i] < 0) {
439       out[i] = in[i];
440       continue;
441     }
442     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
443     out[i] = idx[in[i]];
444   }
445   PetscFunctionReturn(0);
446 }
447 
448 /* -----------------------------------------------------------------------------------------*/
449 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a dense lookup table globals[] covering the range [globalstart,globalend]
    of the nonnegative global indices, with globals[g - globalstart] equal to
    the local index mapping to g, or -1 if g is not present.  Memory use is
    O(globalend - globalstart), i.e. proportional to the span of the indices.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* Find the min/max nonnegative global index; negatives are masked entries */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* No nonnegative indices at all: use an empty range so the table has length 0 */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* +2 rather than +1 so the allocation is nonzero even for the empty range */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  /* -1 marks global indices inside the range with no local counterpart */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
484 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Lazily build the dense global-to-local table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as input; unmappable entries become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];            /* negative input passes through */
        else if (idx[i] < start) idxout[i] = -1;       /* below the table range         */
        else if (idx[i] > end)   idxout[i] = -1;       /* above the table range         */
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: unmappable entries are skipped, so the output may be shorter */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;                         /* in range but not mapped */
        idxout[nf++] = tmp;
      }
    } else {
      /* idxout == NULL: counting pass only, so the caller can size the array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
573 
574 #undef __FUNCT__
575 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
576 /*@C
577     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
578      each index shared by more than one processor
579 
580     Collective on ISLocalToGlobalMapping
581 
582     Input Parameters:
583 .   mapping - the mapping from local to global indexing
584 
585     Output Parameter:
586 +   nproc - number of processors that are connected to this one
587 .   proc - neighboring processors
588 .   numproc - number of indices for each subdomain (processor)
589 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
590 
591     Level: advanced
592 
593     Concepts: mapping^local to global
594 
595     Fortran Usage:
596 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
597 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
598           PetscInt indices[nproc][numprocmax],ierr)
599         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
600         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
601 
602 
603 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
604           ISLocalToGlobalMappingRestoreInfo()
605 @*/
606 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
607 {
608   PetscErrorCode ierr;
609   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
610   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
611   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
612   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
613   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
614   PetscInt       first_procs,first_numprocs,*first_indices;
615   MPI_Request    *recv_waits,*send_waits;
616   MPI_Status     recv_status,*send_status,*recv_statuses;
617   MPI_Comm       comm;
618   PetscBool      debug = PETSC_FALSE;
619 
620   PetscFunctionBegin;
621   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
622   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
623   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
624   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
625   if (size == 1) {
626     *nproc         = 0;
627     *procs         = NULL;
628     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
629     (*numprocs)[0] = 0;
630     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
631     (*indices)[0]  = NULL;
632     PetscFunctionReturn(0);
633   }
634 
635   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
636 
637   /*
638     Notes on ISLocalToGlobalMappingGetInfo
639 
640     globally owned node - the nodes that have been assigned to this processor in global
641            numbering, just for this routine.
642 
643     nontrivial globally owned node - node assigned to this processor that is on a subdomain
644            boundary (i.e. is has more than one local owner)
645 
646     locally owned node - node that exists on this processors subdomain
647 
648     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
649            local subdomain
650   */
651   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
652   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
653   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
654 
655   for (i=0; i<n; i++) {
656     if (lindices[i] > max) max = lindices[i];
657   }
658   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
659   Ng++;
660   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
661   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
662   scale  = Ng/size + 1;
663   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
664   rstart = scale*rank;
665 
666   /* determine ownership ranges of global indices */
667   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
668   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
669 
670   /* determine owners of each local node  */
671   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
672   for (i=0; i<n; i++) {
673     proc             = lindices[i]/scale; /* processor that globally owns this index */
674     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
675     owner[i]         = proc;
676     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
677   }
678   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
679   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
680 
681   /* inform other processors of number of messages and max length*/
682   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
683   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
684 
685   /* post receives for owned rows */
686   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
687   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
688   for (i=0; i<nrecvs; i++) {
689     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
690   }
691 
692   /* pack messages containing lists of local nodes to owners */
693   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
694   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
695   starts[0] = 0;
696   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
697   for (i=0; i<n; i++) {
698     sends[starts[owner[i]]++] = lindices[i];
699     sends[starts[owner[i]]++] = i;
700   }
701   ierr = PetscFree(owner);CHKERRQ(ierr);
702   starts[0] = 0;
703   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
704 
705   /* send the messages */
706   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
707   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
708   cnt = 0;
709   for (i=0; i<size; i++) {
710     if (nprocs[2*i]) {
711       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
712       dest[cnt] = i;
713       cnt++;
714     }
715   }
716   ierr = PetscFree(starts);CHKERRQ(ierr);
717 
718   /* wait on receives */
719   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
720   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
721   cnt  = nrecvs;
722   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
723   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
724   while (cnt) {
725     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
726     /* unpack receives into our local space */
727     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
728     source[imdex] = recv_status.MPI_SOURCE;
729     len[imdex]    = len[imdex]/2;
730     /* count how many local owners for each of my global owned indices */
731     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
732     cnt--;
733   }
734   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
735 
736   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
737   nowned  = 0;
738   nownedm = 0;
739   for (i=0; i<ng; i++) {
740     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
741   }
742 
743   /* create single array to contain rank of all local owners of each globally owned index */
744   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
745   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
746   starts[0] = 0;
747   for (i=1; i<ng; i++) {
748     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
749     else starts[i] = starts[i-1];
750   }
751 
752   /* for each nontrival globally owned node list all arriving processors */
753   for (i=0; i<nrecvs; i++) {
754     for (j=0; j<len[i]; j++) {
755       node = recvs[2*i*nmax+2*j]-rstart;
756       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
757     }
758   }
759 
760   if (debug) { /* -----------------------------------  */
761     starts[0] = 0;
762     for (i=1; i<ng; i++) {
763       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
764       else starts[i] = starts[i-1];
765     }
766     for (i=0; i<ng; i++) {
767       if (nownedsenders[i] > 1) {
768         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
769         for (j=0; j<nownedsenders[i]; j++) {
770           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
771         }
772         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
773       }
774     }
775     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
776   } /* -----------------------------------  */
777 
778   /* wait on original sends */
779   if (nsends) {
780     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
781     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
782     ierr = PetscFree(send_status);CHKERRQ(ierr);
783   }
784   ierr = PetscFree(send_waits);CHKERRQ(ierr);
785   ierr = PetscFree(sends);CHKERRQ(ierr);
786   ierr = PetscFree(nprocs);CHKERRQ(ierr);
787 
788   /* pack messages to send back to local owners */
789   starts[0] = 0;
790   for (i=1; i<ng; i++) {
791     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
792     else starts[i] = starts[i-1];
793   }
794   nsends2 = nrecvs;
795   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
796   for (i=0; i<nrecvs; i++) {
797     nprocs[i] = 1;
798     for (j=0; j<len[i]; j++) {
799       node = recvs[2*i*nmax+2*j]-rstart;
800       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
801     }
802   }
803   nt = 0;
804   for (i=0; i<nsends2; i++) nt += nprocs[i];
805 
806   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
807   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
808 
809   starts2[0] = 0;
810   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
811   /*
812      Each message is 1 + nprocs[i] long, and consists of
813        (0) the number of nodes being sent back
814        (1) the local node number,
815        (2) the number of processors sharing it,
816        (3) the processors sharing it
817   */
818   for (i=0; i<nsends2; i++) {
819     cnt = 1;
820     sends2[starts2[i]] = 0;
821     for (j=0; j<len[i]; j++) {
822       node = recvs[2*i*nmax+2*j]-rstart;
823       if (nownedsenders[node] > 1) {
824         sends2[starts2[i]]++;
825         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
826         sends2[starts2[i]+cnt++] = nownedsenders[node];
827         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
828         cnt += nownedsenders[node];
829       }
830     }
831   }
832 
833   /* receive the message lengths */
834   nrecvs2 = nsends;
835   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
836   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
837   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
838   for (i=0; i<nrecvs2; i++) {
839     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
840   }
841 
842   /* send the message lengths */
843   for (i=0; i<nsends2; i++) {
844     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
845   }
846 
847   /* wait on receives of lens */
848   if (nrecvs2) {
849     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
850     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
851     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
852   }
853   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
854 
855   starts3[0] = 0;
856   nt         = 0;
857   for (i=0; i<nrecvs2-1; i++) {
858     starts3[i+1] = starts3[i] + lens2[i];
859     nt          += lens2[i];
860   }
861   if (nrecvs2) nt += lens2[nrecvs2-1];
862 
863   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
864   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
865   for (i=0; i<nrecvs2; i++) {
866     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
867   }
868 
869   /* send the messages */
870   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
871   for (i=0; i<nsends2; i++) {
872     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
873   }
874 
875   /* wait on receives */
876   if (nrecvs2) {
877     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
878     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
879     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
880   }
881   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
882   ierr = PetscFree(nprocs);CHKERRQ(ierr);
883 
884   if (debug) { /* -----------------------------------  */
885     cnt = 0;
886     for (i=0; i<nrecvs2; i++) {
887       nt = recvs2[cnt++];
888       for (j=0; j<nt; j++) {
889         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
890         for (k=0; k<recvs2[cnt+1]; k++) {
891           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
892         }
893         cnt += 2 + recvs2[cnt+1];
894         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
895       }
896     }
897     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
898   } /* -----------------------------------  */
899 
900   /* count number subdomains for each local node */
901   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
902   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
903   cnt  = 0;
904   for (i=0; i<nrecvs2; i++) {
905     nt = recvs2[cnt++];
906     for (j=0; j<nt; j++) {
907       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
908       cnt += 2 + recvs2[cnt+1];
909     }
910   }
911   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
912   *nproc    = nt;
913   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
914   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
915   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
916   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
917   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
918   cnt       = 0;
919   for (i=0; i<size; i++) {
920     if (nprocs[i] > 0) {
921       bprocs[i]        = cnt;
922       (*procs)[cnt]    = i;
923       (*numprocs)[cnt] = nprocs[i];
924       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
925       cnt++;
926     }
927   }
928 
929   /* make the list of subdomains for each nontrivial local node */
930   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
931   cnt  = 0;
932   for (i=0; i<nrecvs2; i++) {
933     nt = recvs2[cnt++];
934     for (j=0; j<nt; j++) {
935       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
936       cnt += 2 + recvs2[cnt+1];
937     }
938   }
939   ierr = PetscFree(bprocs);CHKERRQ(ierr);
940   ierr = PetscFree(recvs2);CHKERRQ(ierr);
941 
942   /* sort the node indexing by their global numbers */
943   nt = *nproc;
944   for (i=0; i<nt; i++) {
945     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
946     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
947     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
948     ierr = PetscFree(tmp);CHKERRQ(ierr);
949   }
950 
951   if (debug) { /* -----------------------------------  */
952     nt = *nproc;
953     for (i=0; i<nt; i++) {
954       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
955       for (j=0; j<(*numprocs)[i]; j++) {
956         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
957       }
958       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
959     }
960     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
961   } /* -----------------------------------  */
962 
963   /* wait on sends */
964   if (nsends2) {
965     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
966     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
967     ierr = PetscFree(send_status);CHKERRQ(ierr);
968   }
969 
970   ierr = PetscFree(starts3);CHKERRQ(ierr);
971   ierr = PetscFree(dest);CHKERRQ(ierr);
972   ierr = PetscFree(send_waits);CHKERRQ(ierr);
973 
974   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
975   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
976   ierr = PetscFree(starts);CHKERRQ(ierr);
977   ierr = PetscFree(starts2);CHKERRQ(ierr);
978   ierr = PetscFree(lens2);CHKERRQ(ierr);
979 
980   ierr = PetscFree(source);CHKERRQ(ierr);
981   ierr = PetscFree(len);CHKERRQ(ierr);
982   ierr = PetscFree(recvs);CHKERRQ(ierr);
983   ierr = PetscFree(nprocs);CHKERRQ(ierr);
984   ierr = PetscFree(sends2);CHKERRQ(ierr);
985 
986   /* put the information about myself as the first entry in the list */
987   first_procs    = (*procs)[0];
988   first_numprocs = (*numprocs)[0];
989   first_indices  = (*indices)[0];
990   for (i=0; i<*nproc; i++) {
991     if ((*procs)[i] == rank) {
992       (*procs)[0]    = (*procs)[i];
993       (*numprocs)[0] = (*numprocs)[i];
994       (*indices)[0]  = (*indices)[i];
995       (*procs)[i]    = first_procs;
996       (*numprocs)[i] = first_numprocs;
997       (*indices)[i]  = first_indices;
998       break;
999     }
1000   }
1001   PetscFunctionReturn(0);
1002 }
1003 
1004 #undef __FUNCT__
1005 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1006 /*@C
1007     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1008 
1009     Collective on ISLocalToGlobalMapping
1010 
1011     Input Parameters:
1012 .   mapping - the mapping from local to global indexing
1013 
    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1019 
1020     Level: advanced
1021 
1022 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1023           ISLocalToGlobalMappingGetInfo()
1024 @*/
1025 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1026 {
1027   PetscErrorCode ierr;
1028   PetscInt       i;
1029 
1030   PetscFunctionBegin;
1031   ierr = PetscFree(*procs);CHKERRQ(ierr);
1032   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1033   if (*indices) {
1034     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1035     for (i=1; i<*nproc; i++) {
1036       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1037     }
1038     ierr = PetscFree(*indices);CHKERRQ(ierr);
1039   }
1040   PetscFunctionReturn(0);
1041 }
1042 
1043 #undef __FUNCT__
1044 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1045 /*@C
1046    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1047 
1048    Not Collective
1049 
1050    Input Arguments:
1051 . ltog - local to global mapping
1052 
1053    Output Arguments:
1054 . array - array of indices
1055 
1056    Level: advanced
1057 
1058 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1059 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand back a borrowed (read-only) reference to the mapping's internal
     index array; no copy is made, so the caller must return it with
     ISLocalToGlobalMappingRestoreIndices() and must not free it */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1068 
1069 #undef __FUNCT__
1070 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1071 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1073 
1074    Not Collective
1075 
1076    Input Arguments:
1077 + ltog - local to global mapping
1078 - array - array of indices
1079 
1080    Level: advanced
1081 
1082 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1083 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the pointer handed out by GetIndices() is a borrowed reference into the
     mapping; anything else being returned indicates a caller bug */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* null the caller's pointer so a stale use after restore is caught early */
  *array = NULL;
  PetscFunctionReturn(0);
}
1093 
1094 #undef __FUNCT__
1095 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1096 /*@C
1097    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1098 
1099    Not Collective
1100 
1101    Input Arguments:
1102 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1103 . n - number of mappings to concatenate
1104 - ltogs - local to global mappings
1105 
1106    Output Arguments:
1107 . ltogcat - new mapping
1108 
1109    Level: advanced
1110 
1111 .seealso: ISLocalToGlobalMappingCreate()
1112 @*/
1113 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1114 {
1115   PetscInt       i,cnt,m,*idx;
1116   PetscErrorCode ierr;
1117 
1118   PetscFunctionBegin;
1119   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1120   if (n > 0) PetscValidPointer(ltogs,3);
1121   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1122   PetscValidPointer(ltogcat,4);
1123   for (cnt=0,i=0; i<n; i++) {
1124     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1125     cnt += m;
1126   }
1127   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1128   for (cnt=0,i=0; i<n; i++) {
1129     const PetscInt *subidx;
1130     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1131     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1132     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1133     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1134     cnt += m;
1135   }
1136   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1137   PetscFunctionReturn(0);
1138 }
1139