xref: /petsc/src/vec/is/utils/isltog.c (revision 22a7b760d1bac7490ac2e75bf367dc393fb2e18c)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "ISG2LMapApply"
10 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
11 {
12   PetscErrorCode ierr;
13   PetscInt       i,start,end;
14 
15   PetscFunctionBegin;
16   if (!mapping->globals) {
17     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
18   }
19   start = mapping->globalstart;
20   end = mapping->globalend;
21   for (i=0; i<n; i++) {
22     if (in[i] < 0)          out[i] = in[i];
23     else if (in[i] < start) out[i] = -1;
24     else if (in[i] > end)   out[i] = -1;
25     else                    out[i] = mapping->globals[in[i] - start];
26   }
27   PetscFunctionReturn(0);
28 }
29 
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
33 /*@C
34     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
35 
36     Not Collective
37 
38     Input Parameter:
39 .   ltog - local to global mapping
40 
41     Output Parameter:
42 .   n - the number of entries in the local mapping
43 
44     Level: advanced
45 
46     Concepts: mapping^local to global
47 
48 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
49 @*/
50 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
51 {
52   PetscFunctionBegin;
53   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
54   PetscValidIntPointer(n,2);
55   *n = mapping->n;
56   PetscFunctionReturn(0);
57 }
58 
59 #undef __FUNCT__
60 #define __FUNCT__ "ISLocalToGlobalMappingView"
61 /*@C
62     ISLocalToGlobalMappingView - View a local to global mapping
63 
64     Not Collective
65 
66     Input Parameters:
67 +   ltog - local to global mapping
68 -   viewer - viewer
69 
70     Level: advanced
71 
72     Concepts: mapping^local to global
73 
74 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
75 @*/
76 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
77 {
78   PetscInt       i;
79   PetscMPIInt    rank;
80   PetscBool      iascii;
81   PetscErrorCode ierr;
82 
83   PetscFunctionBegin;
84   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
85   if (!viewer) {
86     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
87   }
88   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
89 
90   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
91   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
92   if (iascii) {
93     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
94     for (i=0; i<mapping->n; i++) {
95       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
96     }
97     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
98     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
99   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
100   PetscFunctionReturn(0);
101 }
102 
103 #undef __FUNCT__
104 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
105 /*@
106     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
107     ordering and a global parallel ordering.
108 
109     Not collective
110 
111     Input Parameter:
112 .   is - index set containing the global numbers for each local number
113 
114     Output Parameter:
115 .   mapping - new mapping data structure
116 
117     Level: advanced
118 
119     Concepts: mapping^local to global
120 
121 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
122 @*/
123 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
124 {
125   PetscErrorCode ierr;
126   PetscInt       n;
127   const PetscInt *indices;
128   MPI_Comm       comm;
129 
130   PetscFunctionBegin;
131   PetscValidHeaderSpecific(is,IS_CLASSID,1);
132   PetscValidPointer(mapping,2);
133 
134   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
135   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
136   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
137   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
138   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
139   PetscFunctionReturn(0);
140 }
141 
142 #undef __FUNCT__
143 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
144 /*@C
145     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
146     ordering and a global parallel ordering.
147 
148     Collective
149 
150     Input Parameter:
151 +   sf - star forest mapping contiguous local indices to (rank, offset)
152 -   start - first global index on this process
153 
154     Output Parameter:
155 .   mapping - new mapping data structure
156 
157     Level: advanced
158 
159     Concepts: mapping^local to global
160 
161 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
162 @*/
163 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
164 {
165   PetscErrorCode ierr;
166   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
167   const PetscInt *ilocal;
168   MPI_Comm       comm;
169 
170   PetscFunctionBegin;
171   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
172   PetscValidPointer(mapping,3);
173 
174   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
175   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
176   if (ilocal) {
177     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
178   }
179   else maxlocal = nleaves;
180   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
181   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
182   for (i=0; i<nroots; i++) globals[i] = start + i;
183   for (i=0; i<maxlocal; i++) ltog[i] = -1;
184   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
185   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
186   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
187   ierr = PetscFree(globals);CHKERRQ(ierr);
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNCT__
192 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
193 /*@
194     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
195     ordering and a global parallel ordering.
196 
197     Not Collective, but communicator may have more than one process
198 
199     Input Parameters:
200 +   comm - MPI communicator
201 .   n - the number of local elements
202 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
203 -   mode - see PetscCopyMode
204 
205     Output Parameter:
206 .   mapping - new mapping data structure
207 
208     Level: advanced
209 
210     Concepts: mapping^local to global
211 
212 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
213 @*/
214 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
215 {
216   PetscErrorCode ierr;
217   PetscInt       *in;
218 
219   PetscFunctionBegin;
220   if (n) PetscValidIntPointer(indices,3);
221   PetscValidPointer(mapping,4);
222 
223   *mapping = NULL;
224 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
225   ierr = ISInitializePackage();CHKERRQ(ierr);
226 #endif
227 
228   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
229                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
230   (*mapping)->n = n;
231   /*
232     Do not create the global to local mapping. This is only created if
233     ISGlobalToLocalMapping() is called
234   */
235   (*mapping)->globals = 0;
236   if (mode == PETSC_COPY_VALUES) {
237     ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
238     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
239     ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
240     (*mapping)->indices = in;
241   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
242   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
243   PetscFunctionReturn(0);
244 }
245 
246 #undef __FUNCT__
247 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
248 /*@
249     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
250        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
251        and VecSetLocalToGlobalMappingBlock().
252 
253     Not Collective, but communicator may have more than one process
254 
255     Input Parameters:
256 +    inmap - original point-wise mapping
257 -    bs - block size
258 
259     Output Parameter:
260 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
261 
262     Level: advanced
263 
264     Concepts: mapping^local to global
265 
266 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
267 @*/
268 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
269 {
270   PetscErrorCode ierr;
271   PetscInt       *ii,i,n;
272 
273   PetscFunctionBegin;
274   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
275   PetscValidPointer(outmap,3);
276   if (bs > 1) {
277     n = inmap->n/bs;
278     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
279     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
280     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
281     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
282   } else {
283     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
284     *outmap = inmap;
285   }
286   PetscFunctionReturn(0);
287 }
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
291 /*@
292     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
293        ISLocalToGlobalMapping
294 
295     Not Collective, but communicator may have more than one process
296 
297     Input Parameter:
298 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
299 - bs - block size
300 
301     Output Parameter:
302 .   outmap - pointwise mapping
303 
304     Level: advanced
305 
306     Concepts: mapping^local to global
307 
308 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
309 @*/
310 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
311 {
312   PetscErrorCode ierr;
313   PetscInt       *ii,i,n;
314 
315   PetscFunctionBegin;
316   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
317   PetscValidPointer(outmap,2);
318   if (bs > 1) {
319     n    = inmap->n*bs;
320     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
321     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
322     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
323   } else {
324     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
325     *outmap = inmap;
326   }
327   PetscFunctionReturn(0);
328 }
329 
330 #undef __FUNCT__
331 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
332 /*@
333    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
334    ordering and a global parallel ordering.
335 
336    Note Collective
337 
338    Input Parameters:
339 .  mapping - mapping data structure
340 
341    Level: advanced
342 
343 .seealso: ISLocalToGlobalMappingCreate()
344 @*/
345 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
346 {
347   PetscErrorCode ierr;
348 
349   PetscFunctionBegin;
350   if (!*mapping) PetscFunctionReturn(0);
351   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
352   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
353   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
354   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
355   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
356   *mapping = 0;
357   PetscFunctionReturn(0);
358 }
359 
360 #undef __FUNCT__
361 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
362 /*@
363     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
364     a new index set using the global numbering defined in an ISLocalToGlobalMapping
365     context.
366 
367     Not collective
368 
369     Input Parameters:
370 +   mapping - mapping between local and global numbering
371 -   is - index set in local numbering
372 
373     Output Parameters:
374 .   newis - index set in global numbering
375 
376     Level: advanced
377 
378     Concepts: mapping^local to global
379 
380 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
381           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
382 @*/
383 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
384 {
385   PetscErrorCode ierr;
386   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
387   const PetscInt *idxin;
388 
389   PetscFunctionBegin;
390   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
391   PetscValidHeaderSpecific(is,IS_CLASSID,2);
392   PetscValidPointer(newis,3);
393 
394   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
395   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
396   idxmap = mapping->indices;
397 
398   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
399   for (i=0; i<n; i++) {
400     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
401     idxout[i] = idxmap[idxin[i]];
402   }
403   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
404   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
405   PetscFunctionReturn(0);
406 }
407 
408 #undef __FUNCT__
409 #define __FUNCT__ "ISLocalToGlobalMappingApply"
410 /*@
411    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
412    and converts them to the global numbering.
413 
414    Not collective
415 
416    Input Parameters:
417 +  mapping - the local to global mapping context
418 .  N - number of integers
419 -  in - input indices in local numbering
420 
421    Output Parameter:
422 .  out - indices in global numbering
423 
424    Notes:
425    The in and out array parameters may be identical.
426 
427    Level: advanced
428 
429 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
430           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
431           AOPetscToApplication(), ISGlobalToLocalMappingApply()
432 
433     Concepts: mapping^local to global
434 @*/
435 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
436 {
437   PetscInt       i,Nmax = mapping->n;
438   const PetscInt *idx = mapping->indices;
439 
440   PetscFunctionBegin;
441   for (i=0; i<N; i++) {
442     if (in[i] < 0) {
443       out[i] = in[i];
444       continue;
445     }
446     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
447     out[i] = idx[in[i]];
448   }
449   PetscFunctionReturn(0);
450 }
451 
452 /* -----------------------------------------------------------------------------------------*/
453 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure

    Builds mapping->globals: a dense table over the contiguous global range
    [globalstart, globalend] giving the local index for each owned global
    index, with -1 marking global slots not present locally.  Memory use is
    O(globalend - globalstart), which is why this is built lazily.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min/max of the nonnegative global indices */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;  /* negative entries are not mapped */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}  /* no nonnegative indices: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* table holds end-start+1 entries; the +2 keeps the allocation nonzero for an empty range */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;  /* -1 = no local owner */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;  /* for duplicate globals, the last local occurrence wins */
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
488 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as the input; unmappable entries become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];            /* negatives pass through */
        else if (idx[i] < start) idxout[i] = -1;       /* below owned range */
        else if (idx[i] > end)   idxout[i] = -1;       /* above owned range */
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: only mappable entries are emitted; nf counts them */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;                         /* in range but not owned locally */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting pass only (idxout == NULL): lets the caller size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
580 /*@C
581     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
582      each index shared by more than one processor
583 
584     Collective on ISLocalToGlobalMapping
585 
586     Input Parameters:
587 .   mapping - the mapping from local to global indexing
588 
589     Output Parameter:
590 +   nproc - number of processors that are connected to this one
591 .   proc - neighboring processors
592 .   numproc - number of indices for each subdomain (processor)
593 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
594 
595     Level: advanced
596 
597     Concepts: mapping^local to global
598 
599     Fortran Usage:
600 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
601 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
602           PetscInt indices[nproc][numprocmax],ierr)
603         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
604         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
605 
606 
607 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
608           ISLocalToGlobalMappingRestoreInfo()
609 @*/
610 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
611 {
612   PetscErrorCode ierr;
613   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
614   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
615   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
616   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
617   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
618   PetscInt       first_procs,first_numprocs,*first_indices;
619   MPI_Request    *recv_waits,*send_waits;
620   MPI_Status     recv_status,*send_status,*recv_statuses;
621   MPI_Comm       comm;
622   PetscBool      debug = PETSC_FALSE;
623 
624   PetscFunctionBegin;
625   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
626   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
627   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
628   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
629   if (size == 1) {
630     *nproc         = 0;
631     *procs         = NULL;
632     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
633     (*numprocs)[0] = 0;
634     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
635     (*indices)[0]  = NULL;
636     PetscFunctionReturn(0);
637   }
638 
639   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
640 
641   /*
642     Notes on ISLocalToGlobalMappingGetInfo
643 
644     globally owned node - the nodes that have been assigned to this processor in global
645            numbering, just for this routine.
646 
647     nontrivial globally owned node - node assigned to this processor that is on a subdomain
648            boundary (i.e. is has more than one local owner)
649 
650     locally owned node - node that exists on this processors subdomain
651 
652     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
653            local subdomain
654   */
655   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
656   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
657   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
658 
659   for (i=0; i<n; i++) {
660     if (lindices[i] > max) max = lindices[i];
661   }
662   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
663   Ng++;
664   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
665   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
666   scale  = Ng/size + 1;
667   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
668   rstart = scale*rank;
669 
670   /* determine ownership ranges of global indices */
671   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
672   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
673 
674   /* determine owners of each local node  */
675   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
676   for (i=0; i<n; i++) {
677     proc             = lindices[i]/scale; /* processor that globally owns this index */
678     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
679     owner[i]         = proc;
680     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
681   }
682   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
683   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
684 
685   /* inform other processors of number of messages and max length*/
686   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
687   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
688 
689   /* post receives for owned rows */
690   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
691   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
692   for (i=0; i<nrecvs; i++) {
693     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
694   }
695 
696   /* pack messages containing lists of local nodes to owners */
697   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
698   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
699   starts[0] = 0;
700   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
701   for (i=0; i<n; i++) {
702     sends[starts[owner[i]]++] = lindices[i];
703     sends[starts[owner[i]]++] = i;
704   }
705   ierr = PetscFree(owner);CHKERRQ(ierr);
706   starts[0] = 0;
707   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
708 
709   /* send the messages */
710   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
711   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
712   cnt = 0;
713   for (i=0; i<size; i++) {
714     if (nprocs[2*i]) {
715       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
716       dest[cnt] = i;
717       cnt++;
718     }
719   }
720   ierr = PetscFree(starts);CHKERRQ(ierr);
721 
722   /* wait on receives */
723   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
724   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
725   cnt  = nrecvs;
726   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
727   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
728   while (cnt) {
729     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
730     /* unpack receives into our local space */
731     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
732     source[imdex] = recv_status.MPI_SOURCE;
733     len[imdex]    = len[imdex]/2;
734     /* count how many local owners for each of my global owned indices */
735     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
736     cnt--;
737   }
738   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
739 
740   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
741   nowned  = 0;
742   nownedm = 0;
743   for (i=0; i<ng; i++) {
744     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
745   }
746 
747   /* create single array to contain rank of all local owners of each globally owned index */
748   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
749   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
750   starts[0] = 0;
751   for (i=1; i<ng; i++) {
752     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
753     else starts[i] = starts[i-1];
754   }
755 
756   /* for each nontrival globally owned node list all arriving processors */
757   for (i=0; i<nrecvs; i++) {
758     for (j=0; j<len[i]; j++) {
759       node = recvs[2*i*nmax+2*j]-rstart;
760       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
761     }
762   }
763 
764   if (debug) { /* -----------------------------------  */
765     starts[0] = 0;
766     for (i=1; i<ng; i++) {
767       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
768       else starts[i] = starts[i-1];
769     }
770     for (i=0; i<ng; i++) {
771       if (nownedsenders[i] > 1) {
772         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
773         for (j=0; j<nownedsenders[i]; j++) {
774           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
775         }
776         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
777       }
778     }
779     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
780   } /* -----------------------------------  */
781 
782   /* wait on original sends */
783   if (nsends) {
784     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
785     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
786     ierr = PetscFree(send_status);CHKERRQ(ierr);
787   }
788   ierr = PetscFree(send_waits);CHKERRQ(ierr);
789   ierr = PetscFree(sends);CHKERRQ(ierr);
790   ierr = PetscFree(nprocs);CHKERRQ(ierr);
791 
792   /* pack messages to send back to local owners */
793   starts[0] = 0;
794   for (i=1; i<ng; i++) {
795     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
796     else starts[i] = starts[i-1];
797   }
798   nsends2 = nrecvs;
799   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
800   for (i=0; i<nrecvs; i++) {
801     nprocs[i] = 1;
802     for (j=0; j<len[i]; j++) {
803       node = recvs[2*i*nmax+2*j]-rstart;
804       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
805     }
806   }
807   nt = 0;
808   for (i=0; i<nsends2; i++) nt += nprocs[i];
809 
810   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
811   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
812 
813   starts2[0] = 0;
814   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
815   /*
816      Each message is 1 + nprocs[i] long, and consists of
817        (0) the number of nodes being sent back
818        (1) the local node number,
819        (2) the number of processors sharing it,
820        (3) the processors sharing it
821   */
822   for (i=0; i<nsends2; i++) {
823     cnt = 1;
824     sends2[starts2[i]] = 0;
825     for (j=0; j<len[i]; j++) {
826       node = recvs[2*i*nmax+2*j]-rstart;
827       if (nownedsenders[node] > 1) {
828         sends2[starts2[i]]++;
829         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
830         sends2[starts2[i]+cnt++] = nownedsenders[node];
831         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
832         cnt += nownedsenders[node];
833       }
834     }
835   }
836 
837   /* receive the message lengths */
838   nrecvs2 = nsends;
839   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
840   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
841   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
842   for (i=0; i<nrecvs2; i++) {
843     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
844   }
845 
846   /* send the message lengths */
847   for (i=0; i<nsends2; i++) {
848     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
849   }
850 
851   /* wait on receives of lens */
852   if (nrecvs2) {
853     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
854     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
855     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
856   }
857   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
858 
859   starts3[0] = 0;
860   nt         = 0;
861   for (i=0; i<nrecvs2-1; i++) {
862     starts3[i+1] = starts3[i] + lens2[i];
863     nt          += lens2[i];
864   }
865   if (nrecvs2) nt += lens2[nrecvs2-1];
866 
867   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
868   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
869   for (i=0; i<nrecvs2; i++) {
870     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
871   }
872 
873   /* send the messages */
874   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
875   for (i=0; i<nsends2; i++) {
876     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
877   }
878 
879   /* wait on receives */
880   if (nrecvs2) {
881     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
882     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
883     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
884   }
885   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
886   ierr = PetscFree(nprocs);CHKERRQ(ierr);
887 
888   if (debug) { /* -----------------------------------  */
889     cnt = 0;
890     for (i=0; i<nrecvs2; i++) {
891       nt = recvs2[cnt++];
892       for (j=0; j<nt; j++) {
893         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
894         for (k=0; k<recvs2[cnt+1]; k++) {
895           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
896         }
897         cnt += 2 + recvs2[cnt+1];
898         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
899       }
900     }
901     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
902   } /* -----------------------------------  */
903 
904   /* count number subdomains for each local node */
905   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
906   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
907   cnt  = 0;
908   for (i=0; i<nrecvs2; i++) {
909     nt = recvs2[cnt++];
910     for (j=0; j<nt; j++) {
911       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
912       cnt += 2 + recvs2[cnt+1];
913     }
914   }
915   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
916   *nproc    = nt;
917   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
918   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
919   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
920   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
921   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
922   cnt       = 0;
923   for (i=0; i<size; i++) {
924     if (nprocs[i] > 0) {
925       bprocs[i]        = cnt;
926       (*procs)[cnt]    = i;
927       (*numprocs)[cnt] = nprocs[i];
928       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
929       cnt++;
930     }
931   }
932 
933   /* make the list of subdomains for each nontrivial local node */
934   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
935   cnt  = 0;
936   for (i=0; i<nrecvs2; i++) {
937     nt = recvs2[cnt++];
938     for (j=0; j<nt; j++) {
939       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
940       cnt += 2 + recvs2[cnt+1];
941     }
942   }
943   ierr = PetscFree(bprocs);CHKERRQ(ierr);
944   ierr = PetscFree(recvs2);CHKERRQ(ierr);
945 
946   /* sort the node indexing by their global numbers */
947   nt = *nproc;
948   for (i=0; i<nt; i++) {
949     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
950     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
951     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
952     ierr = PetscFree(tmp);CHKERRQ(ierr);
953   }
954 
955   if (debug) { /* -----------------------------------  */
956     nt = *nproc;
957     for (i=0; i<nt; i++) {
958       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
959       for (j=0; j<(*numprocs)[i]; j++) {
960         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
961       }
962       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
963     }
964     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
965   } /* -----------------------------------  */
966 
967   /* wait on sends */
968   if (nsends2) {
969     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
970     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
971     ierr = PetscFree(send_status);CHKERRQ(ierr);
972   }
973 
974   ierr = PetscFree(starts3);CHKERRQ(ierr);
975   ierr = PetscFree(dest);CHKERRQ(ierr);
976   ierr = PetscFree(send_waits);CHKERRQ(ierr);
977 
978   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
979   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
980   ierr = PetscFree(starts);CHKERRQ(ierr);
981   ierr = PetscFree(starts2);CHKERRQ(ierr);
982   ierr = PetscFree(lens2);CHKERRQ(ierr);
983 
984   ierr = PetscFree(source);CHKERRQ(ierr);
985   ierr = PetscFree(len);CHKERRQ(ierr);
986   ierr = PetscFree(recvs);CHKERRQ(ierr);
987   ierr = PetscFree(nprocs);CHKERRQ(ierr);
988   ierr = PetscFree(sends2);CHKERRQ(ierr);
989 
990   /* put the information about myself as the first entry in the list */
991   first_procs    = (*procs)[0];
992   first_numprocs = (*numprocs)[0];
993   first_indices  = (*indices)[0];
994   for (i=0; i<*nproc; i++) {
995     if ((*procs)[i] == rank) {
996       (*procs)[0]    = (*procs)[i];
997       (*numprocs)[0] = (*numprocs)[i];
998       (*indices)[0]  = (*indices)[i];
999       (*procs)[i]    = first_procs;
1000       (*numprocs)[i] = first_numprocs;
1001       (*indices)[i]  = first_indices;
1002       break;
1003     }
1004   }
1005   PetscFunctionReturn(0);
1006 }
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1010 /*@C
1011     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1012 
1013     Collective on ISLocalToGlobalMapping
1014 
    Input Parameter:
.   mapping - the mapping from local to global indexing
1017 
1018     Output Parameter:
1019 +   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
1022 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1023 
1024     Level: advanced
1025 
1026 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1027           ISLocalToGlobalMappingGetInfo()
1028 @*/
1029 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1030 {
1031   PetscErrorCode ierr;
1032   PetscInt       i;
1033 
1034   PetscFunctionBegin;
1035   ierr = PetscFree(*procs);CHKERRQ(ierr);
1036   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1037   if (*indices) {
1038     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1039     for (i=1; i<*nproc; i++) {
1040       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1041     }
1042     ierr = PetscFree(*indices);CHKERRQ(ierr);
1043   }
1044   PetscFunctionReturn(0);
1045 }
1046 
1047 #undef __FUNCT__
1048 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1049 /*@C
1050    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1051 
1052    Not Collective
1053 
1054    Input Arguments:
1055 . ltog - local to global mapping
1056 
1057    Output Arguments:
1058 . array - array of indices
1059 
1060    Level: advanced
1061 
1062 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1063 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Hand back a borrowed (const) reference to the mapping's internal index
     array: no copy is made, so the caller must not free or modify it, and
     must return the identical pointer via
     ISLocalToGlobalMappingRestoreIndices(), which checks pointer identity. */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1072 
1073 #undef __FUNCT__
1074 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1075 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1077 
1078    Not Collective
1079 
1080    Input Arguments:
1081 + ltog - local to global mapping
1082 - array - array of indices
1083 
1084    Level: advanced
1085 
1086 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1087 @*/
1088 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1089 {
1090   PetscFunctionBegin;
1091   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1092   PetscValidPointer(array,2);
1093   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1094   *array = NULL;
1095   PetscFunctionReturn(0);
1096 }
1097 
1098 #undef __FUNCT__
1099 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1100 /*@C
1101    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1102 
1103    Not Collective
1104 
1105    Input Arguments:
1106 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1107 . n - number of mappings to concatenate
1108 - ltogs - local to global mappings
1109 
1110    Output Arguments:
1111 . ltogcat - new mapping
1112 
1113    Level: advanced
1114 
1115 .seealso: ISLocalToGlobalMappingCreate()
1116 @*/
1117 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1118 {
1119   PetscInt       i,cnt,m,*idx;
1120   PetscErrorCode ierr;
1121 
1122   PetscFunctionBegin;
1123   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1124   if (n > 0) PetscValidPointer(ltogs,3);
1125   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1126   PetscValidPointer(ltogcat,4);
1127   for (cnt=0,i=0; i<n; i++) {
1128     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1129     cnt += m;
1130   }
1131   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1132   for (cnt=0,i=0; i<n; i++) {
1133     const PetscInt *subidx;
1134     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1135     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1136     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1137     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1138     cnt += m;
1139   }
1140   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143