xref: /petsc/src/vec/is/utils/isltog.c (revision 04a59952075a8203023532fbff2cf20558f6a7c6)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@C
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
95     for (i=0; i<mapping->n; i++) {
96       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
97     }
98     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
99     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
100   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
101   PetscFunctionReturn(0);
102 }
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
106 /*@
107     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
108     ordering and a global parallel ordering.
109 
110     Not collective
111 
112     Input Parameter:
113 .   is - index set containing the global numbers for each local number
114 
115     Output Parameter:
116 .   mapping - new mapping data structure
117 
118     Level: advanced
119 
120     Concepts: mapping^local to global
121 
122 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
123 @*/
124 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
125 {
126   PetscErrorCode ierr;
127   PetscInt       n;
128   const PetscInt *indices;
129   MPI_Comm       comm;
130 
131   PetscFunctionBegin;
132   PetscValidHeaderSpecific(is,IS_CLASSID,1);
133   PetscValidPointer(mapping,2);
134 
135   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
136   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
137   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
138   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
139   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
140   PetscFunctionReturn(0);
141 }
142 
143 #undef __FUNCT__
144 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
145 /*@C
146     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
147     ordering and a global parallel ordering.
148 
149     Collective
150 
151     Input Parameters:
152 +   sf - star forest mapping contiguous local indices to (rank, offset)
153 -   start - first global index on this process
154 
155     Output Parameter:
156 .   mapping - new mapping data structure
157 
158     Level: advanced
159 
160     Concepts: mapping^local to global
161 
162 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
163 @*/
164 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
165 {
166   PetscErrorCode ierr;
167   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
168   const PetscInt *ilocal;
169   MPI_Comm       comm;
170 
171   PetscFunctionBegin;
172   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
173   PetscValidPointer(mapping,3);
174 
175   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
176   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
177   if (ilocal) {
178     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
179   }
180   else maxlocal = nleaves;
181   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
182   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
183   for (i=0; i<nroots; i++) globals[i] = start + i;
184   for (i=0; i<maxlocal; i++) ltog[i] = -1;
185   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
186   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
187   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
188   ierr = PetscFree(globals);CHKERRQ(ierr);
189   PetscFunctionReturn(0);
190 }
191 
192 #undef __FUNCT__
193 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
194 /*@
195     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
196     ordering and a global parallel ordering.
197 
198     Not Collective, but communicator may have more than one process
199 
200     Input Parameters:
201 +   comm - MPI communicator
202 .   n - the number of local elements
203 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
204 -   mode - see PetscCopyMode
205 
206     Output Parameter:
207 .   mapping - new mapping data structure
208 
209     Level: advanced
210 
211     Concepts: mapping^local to global
212 
213 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
214 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL when there are no local entries (n == 0) */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = ISInitializePackage();CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices; caller keeps its array */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices; /* mapping takes ownership; freed in ISLocalToGlobalMappingDestroy() */
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
246 
247 #undef __FUNCT__
248 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
249 /*@
250     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
251        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
252        and VecSetLocalToGlobalMappingBlock().
253 
254     Not Collective, but communicator may have more than one process
255 
256     Input Parameters:
257 +    inmap - original point-wise mapping
258 -    bs - block size
259 
260     Output Parameter:
261 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
262 
263     Level: advanced
264 
265     Concepts: mapping^local to global
266 
267 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
268 @*/
269 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
270 {
271   PetscErrorCode ierr;
272   PetscInt       *ii,i,n;
273 
274   PetscFunctionBegin;
275   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
276   PetscValidPointer(outmap,3);
277   if (bs > 1) {
278     n = inmap->n/bs;
279     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
280     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
281     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
282     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
283   } else {
284     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
285     *outmap = inmap;
286   }
287   PetscFunctionReturn(0);
288 }
289 
290 #undef __FUNCT__
291 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
292 /*@
293     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
294        ISLocalToGlobalMapping
295 
296     Not Collective, but communicator may have more than one process
297 
298     Input Parameters:
299 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
300 - bs - block size
301 
302     Output Parameter:
303 .   outmap - pointwise mapping
304 
305     Level: advanced
306 
307     Concepts: mapping^local to global
308 
309 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
310 @*/
311 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
312 {
313   PetscErrorCode ierr;
314   PetscInt       *ii,i,n;
315 
316   PetscFunctionBegin;
317   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
318   PetscValidPointer(outmap,2);
319   if (bs > 1) {
320     n    = inmap->n*bs;
321     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
322     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
323     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
324   } else {
325     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
326     *outmap = inmap;
327   }
328   PetscFunctionReturn(0);
329 }
330 
331 #undef __FUNCT__
332 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
333 /*@
334    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
335    ordering and a global parallel ordering.
336 
337    Note Collective
338 
339    Input Parameters:
340 .  mapping - mapping data structure
341 
342    Level: advanced
343 
344 .seealso: ISLocalToGlobalMappingCreate()
345 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);  /* destroying a NULL mapping is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object; earlier calls just drop the refcount */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr); /* lazily built global-to-local table; may be NULL */
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;  /* zero the caller's pointer to guard against reuse */
  PetscFunctionReturn(0);
}
360 
361 #undef __FUNCT__
362 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
363 /*@
364     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
365     a new index set using the global numbering defined in an ISLocalToGlobalMapping
366     context.
367 
368     Not collective
369 
370     Input Parameters:
371 +   mapping - mapping between local and global numbering
372 -   is - index set in local numbering
373 
374     Output Parameters:
375 .   newis - index set in global numbering
376 
377     Level: advanced
378 
379     Concepts: mapping^local to global
380 
381 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
382           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
383 @*/
384 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
385 {
386   PetscErrorCode ierr;
387   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
388   const PetscInt *idxin;
389 
390   PetscFunctionBegin;
391   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
392   PetscValidHeaderSpecific(is,IS_CLASSID,2);
393   PetscValidPointer(newis,3);
394 
395   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
396   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
397   idxmap = mapping->indices;
398 
399   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
400   for (i=0; i<n; i++) {
401     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
402     idxout[i] = idxmap[idxin[i]];
403   }
404   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
405   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
406   PetscFunctionReturn(0);
407 }
408 
409 #undef __FUNCT__
410 #define __FUNCT__ "ISLocalToGlobalMappingApply"
411 /*@
412    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
413    and converts them to the global numbering.
414 
415    Not collective
416 
417    Input Parameters:
418 +  mapping - the local to global mapping context
419 .  N - number of integers
420 -  in - input indices in local numbering
421 
422    Output Parameter:
423 .  out - indices in global numbering
424 
425    Notes:
426    The in and out array parameters may be identical.
427 
428    Level: advanced
429 
430 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
431           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
432           AOPetscToApplication(), ISGlobalToLocalMappingApply()
433 
434     Concepts: mapping^local to global
435 @*/
436 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
437 {
438   PetscInt       i,Nmax = mapping->n;
439   const PetscInt *idx = mapping->indices;
440 
441   PetscFunctionBegin;
442   for (i=0; i<N; i++) {
443     if (in[i] < 0) {
444       out[i] = in[i];
445       continue;
446     }
447     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
448     out[i] = idx[in[i]];
449   }
450   PetscFunctionReturn(0);
451 }
452 
453 /* -----------------------------------------------------------------------------------------*/
454 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a dense lookup table globals[] spanning [globalstart, globalend] with
    globals[g - globalstart] = local index of global index g, or -1 when g
    is not present in the mapping.  Memory use is O(globalend-globalstart),
    so this is not scalable for widely spread global indices.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min/max nonnegative global index; negatives are ignored */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: make the range empty (end < start) */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* +2 keeps the allocation nonzero even for the empty range */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
489 
490 #undef __FUNCT__
491 #define __FUNCT__ "ISGlobalToLocalMappingApply"
492 /*@
493     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
494     specified with a global numbering.
495 
496     Not collective
497 
498     Input Parameters:
499 +   mapping - mapping between local and global numbering
500 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
501            IS_GTOLM_DROP - drops the indices with no local value from the output list
502 .   n - number of global indices to map
503 -   idx - global indices to map
504 
505     Output Parameters:
506 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
507 -   idxout - local index of each global index, one must pass in an array long enough
508              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
509              idxout == NULL to determine the required length (returned in nout)
510              and then allocate the required space and call ISGlobalToLocalMappingApply()
511              a second time to set the values.
512 
513     Notes:
514     Either nout or idxout may be NULL. idx and idxout may be identical.
515 
516     This is not scalable in memory usage. Each processor requires O(Nglobal) size
517     array to compute these.
518 
519     Level: advanced
520 
521     Developer Note: The manual page states that idx and idxout may be identical but the calling
522        sequence declares idx as const so it cannot be the same as idxout.
523 
524     Concepts: mapping^global to local
525 
526 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
527           ISLocalToGlobalMappingDestroy()
528 @*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* build the dense global-to-local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as the input; negative inputs pass
       through unchanged, unmapped globals become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: negative and unmapped entries are omitted from the output */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue; /* in range but not present in the mapping */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == NULL): report the required output length */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
578 
579 #undef __FUNCT__
580 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
581 /*@C
582     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
583      each index shared by more than one processor
584 
585     Collective on ISLocalToGlobalMapping
586 
587     Input Parameters:
588 .   mapping - the mapping from local to global indexing
589 
590     Output Parameter:
591 +   nproc - number of processors that are connected to this one
592 .   proc - neighboring processors
593 .   numproc - number of indices for each subdomain (processor)
594 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
595 
596     Level: advanced
597 
598     Concepts: mapping^local to global
599 
600     Fortran Usage:
601 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
602 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
603           PetscInt indices[nproc][numprocmax],ierr)
604         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
605         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
606 
607 
608 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
609           ISLocalToGlobalMappingRestoreInfo()
610 @*/
611 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
612 {
613   PetscErrorCode ierr;
614   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
615   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
616   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
617   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
618   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
619   PetscInt       first_procs,first_numprocs,*first_indices;
620   MPI_Request    *recv_waits,*send_waits;
621   MPI_Status     recv_status,*send_status,*recv_statuses;
622   MPI_Comm       comm;
623   PetscBool      debug = PETSC_FALSE;
624 
625   PetscFunctionBegin;
626   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
627   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
628   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
629   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
630   if (size == 1) {
631     *nproc         = 0;
632     *procs         = NULL;
633     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
634     (*numprocs)[0] = 0;
635     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
636     (*indices)[0]  = NULL;
637     PetscFunctionReturn(0);
638   }
639 
640   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
641 
642   /*
643     Notes on ISLocalToGlobalMappingGetInfo
644 
645     globally owned node - the nodes that have been assigned to this processor in global
646            numbering, just for this routine.
647 
648     nontrivial globally owned node - node assigned to this processor that is on a subdomain
649            boundary (i.e. is has more than one local owner)
650 
651     locally owned node - node that exists on this processors subdomain
652 
653     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
654            local subdomain
655   */
656   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
657   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
658   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
659 
660   for (i=0; i<n; i++) {
661     if (lindices[i] > max) max = lindices[i];
662   }
663   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
664   Ng++;
665   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
666   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
667   scale  = Ng/size + 1;
668   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
669   rstart = scale*rank;
670 
671   /* determine ownership ranges of global indices */
672   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
673   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
674 
675   /* determine owners of each local node  */
676   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
677   for (i=0; i<n; i++) {
678     proc             = lindices[i]/scale; /* processor that globally owns this index */
679     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
680     owner[i]         = proc;
681     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
682   }
683   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
684   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
685 
686   /* inform other processors of number of messages and max length*/
687   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
688   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
689 
690   /* post receives for owned rows */
691   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
692   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
693   for (i=0; i<nrecvs; i++) {
694     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
695   }
696 
697   /* pack messages containing lists of local nodes to owners */
698   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
699   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
700   starts[0] = 0;
701   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
702   for (i=0; i<n; i++) {
703     sends[starts[owner[i]]++] = lindices[i];
704     sends[starts[owner[i]]++] = i;
705   }
706   ierr = PetscFree(owner);CHKERRQ(ierr);
707   starts[0] = 0;
708   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
709 
710   /* send the messages */
711   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
712   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
713   cnt = 0;
714   for (i=0; i<size; i++) {
715     if (nprocs[2*i]) {
716       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
717       dest[cnt] = i;
718       cnt++;
719     }
720   }
721   ierr = PetscFree(starts);CHKERRQ(ierr);
722 
723   /* wait on receives */
724   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
725   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
726   cnt  = nrecvs;
727   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
728   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
729   while (cnt) {
730     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
731     /* unpack receives into our local space */
732     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
733     source[imdex] = recv_status.MPI_SOURCE;
734     len[imdex]    = len[imdex]/2;
735     /* count how many local owners for each of my global owned indices */
736     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
737     cnt--;
738   }
739   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
740 
741   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
742   nowned  = 0;
743   nownedm = 0;
744   for (i=0; i<ng; i++) {
745     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
746   }
747 
748   /* create single array to contain rank of all local owners of each globally owned index */
749   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
750   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
751   starts[0] = 0;
752   for (i=1; i<ng; i++) {
753     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
754     else starts[i] = starts[i-1];
755   }
756 
757   /* for each nontrival globally owned node list all arriving processors */
758   for (i=0; i<nrecvs; i++) {
759     for (j=0; j<len[i]; j++) {
760       node = recvs[2*i*nmax+2*j]-rstart;
761       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
762     }
763   }
764 
765   if (debug) { /* -----------------------------------  */
766     starts[0] = 0;
767     for (i=1; i<ng; i++) {
768       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
769       else starts[i] = starts[i-1];
770     }
771     for (i=0; i<ng; i++) {
772       if (nownedsenders[i] > 1) {
773         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
774         for (j=0; j<nownedsenders[i]; j++) {
775           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
776         }
777         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
778       }
779     }
780     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
781   } /* -----------------------------------  */
782 
783   /* wait on original sends */
784   if (nsends) {
785     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
786     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
787     ierr = PetscFree(send_status);CHKERRQ(ierr);
788   }
789   ierr = PetscFree(send_waits);CHKERRQ(ierr);
790   ierr = PetscFree(sends);CHKERRQ(ierr);
791   ierr = PetscFree(nprocs);CHKERRQ(ierr);
792 
793   /* pack messages to send back to local owners */
794   starts[0] = 0;
795   for (i=1; i<ng; i++) {
796     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
797     else starts[i] = starts[i-1];
798   }
799   nsends2 = nrecvs;
800   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
801   for (i=0; i<nrecvs; i++) {
802     nprocs[i] = 1;
803     for (j=0; j<len[i]; j++) {
804       node = recvs[2*i*nmax+2*j]-rstart;
805       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
806     }
807   }
808   nt = 0;
809   for (i=0; i<nsends2; i++) nt += nprocs[i];
810 
811   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
812   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
813 
814   starts2[0] = 0;
815   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
816   /*
817      Each message is 1 + nprocs[i] long, and consists of
818        (0) the number of nodes being sent back
819        (1) the local node number,
820        (2) the number of processors sharing it,
821        (3) the processors sharing it
822   */
823   for (i=0; i<nsends2; i++) {
824     cnt = 1;
825     sends2[starts2[i]] = 0;
826     for (j=0; j<len[i]; j++) {
827       node = recvs[2*i*nmax+2*j]-rstart;
828       if (nownedsenders[node] > 1) {
829         sends2[starts2[i]]++;
830         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
831         sends2[starts2[i]+cnt++] = nownedsenders[node];
832         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
833         cnt += nownedsenders[node];
834       }
835     }
836   }
837 
838   /* receive the message lengths */
839   nrecvs2 = nsends;
840   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
841   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
842   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
843   for (i=0; i<nrecvs2; i++) {
844     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
845   }
846 
847   /* send the message lengths */
848   for (i=0; i<nsends2; i++) {
849     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
850   }
851 
852   /* wait on receives of lens */
853   if (nrecvs2) {
854     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
855     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
856     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
857   }
858   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
859 
860   starts3[0] = 0;
861   nt         = 0;
862   for (i=0; i<nrecvs2-1; i++) {
863     starts3[i+1] = starts3[i] + lens2[i];
864     nt          += lens2[i];
865   }
866   if (nrecvs2) nt += lens2[nrecvs2-1];
867 
868   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
869   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
870   for (i=0; i<nrecvs2; i++) {
871     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
872   }
873 
874   /* send the messages */
875   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
876   for (i=0; i<nsends2; i++) {
877     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
878   }
879 
880   /* wait on receives */
881   if (nrecvs2) {
882     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
883     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
884     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
885   }
886   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
887   ierr = PetscFree(nprocs);CHKERRQ(ierr);
888 
889   if (debug) { /* -----------------------------------  */
890     cnt = 0;
891     for (i=0; i<nrecvs2; i++) {
892       nt = recvs2[cnt++];
893       for (j=0; j<nt; j++) {
894         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
895         for (k=0; k<recvs2[cnt+1]; k++) {
896           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
897         }
898         cnt += 2 + recvs2[cnt+1];
899         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
900       }
901     }
902     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
903   } /* -----------------------------------  */
904 
905   /* count number subdomains for each local node */
906   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
907   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
908   cnt  = 0;
909   for (i=0; i<nrecvs2; i++) {
910     nt = recvs2[cnt++];
911     for (j=0; j<nt; j++) {
912       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
913       cnt += 2 + recvs2[cnt+1];
914     }
915   }
916   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
917   *nproc    = nt;
918   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
919   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
920   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
921   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
922   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
923   cnt       = 0;
924   for (i=0; i<size; i++) {
925     if (nprocs[i] > 0) {
926       bprocs[i]        = cnt;
927       (*procs)[cnt]    = i;
928       (*numprocs)[cnt] = nprocs[i];
929       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
930       cnt++;
931     }
932   }
933 
934   /* make the list of subdomains for each nontrivial local node */
935   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
936   cnt  = 0;
937   for (i=0; i<nrecvs2; i++) {
938     nt = recvs2[cnt++];
939     for (j=0; j<nt; j++) {
940       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
941       cnt += 2 + recvs2[cnt+1];
942     }
943   }
944   ierr = PetscFree(bprocs);CHKERRQ(ierr);
945   ierr = PetscFree(recvs2);CHKERRQ(ierr);
946 
947   /* sort the node indexing by their global numbers */
948   nt = *nproc;
949   for (i=0; i<nt; i++) {
950     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
951     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
952     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
953     ierr = PetscFree(tmp);CHKERRQ(ierr);
954   }
955 
956   if (debug) { /* -----------------------------------  */
957     nt = *nproc;
958     for (i=0; i<nt; i++) {
959       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
960       for (j=0; j<(*numprocs)[i]; j++) {
961         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
962       }
963       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
964     }
965     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
966   } /* -----------------------------------  */
967 
968   /* wait on sends */
969   if (nsends2) {
970     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
971     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
972     ierr = PetscFree(send_status);CHKERRQ(ierr);
973   }
974 
975   ierr = PetscFree(starts3);CHKERRQ(ierr);
976   ierr = PetscFree(dest);CHKERRQ(ierr);
977   ierr = PetscFree(send_waits);CHKERRQ(ierr);
978 
979   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
980   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
981   ierr = PetscFree(starts);CHKERRQ(ierr);
982   ierr = PetscFree(starts2);CHKERRQ(ierr);
983   ierr = PetscFree(lens2);CHKERRQ(ierr);
984 
985   ierr = PetscFree(source);CHKERRQ(ierr);
986   ierr = PetscFree(len);CHKERRQ(ierr);
987   ierr = PetscFree(recvs);CHKERRQ(ierr);
988   ierr = PetscFree(nprocs);CHKERRQ(ierr);
989   ierr = PetscFree(sends2);CHKERRQ(ierr);
990 
991   /* put the information about myself as the first entry in the list */
992   first_procs    = (*procs)[0];
993   first_numprocs = (*numprocs)[0];
994   first_indices  = (*indices)[0];
995   for (i=0; i<*nproc; i++) {
996     if ((*procs)[i] == rank) {
997       (*procs)[0]    = (*procs)[i];
998       (*numprocs)[0] = (*numprocs)[i];
999       (*indices)[0]  = (*indices)[i];
1000       (*procs)[i]    = first_procs;
1001       (*numprocs)[i] = first_numprocs;
1002       (*indices)[i]  = first_indices;
1003       break;
1004     }
1005   }
1006   PetscFunctionReturn(0);
1007 }
1008 
1009 #undef __FUNCT__
1010 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1011 /*@C
1012     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1013 
1014     Collective on ISLocalToGlobalMapping
1015 
1016     Input Parameters:
1017 .   mapping - the mapping from local to global indexing
1018 
1019     Output Parameter:
1020 +   nproc - number of processors that are connected to this one
1021 .   proc - neighboring processors
1022 .   numproc - number of indices for each processor
1023 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1024 
1025     Level: advanced
1026 
1027 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1028           ISLocalToGlobalMappingGetInfo()
1029 @*/
1030 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1031 {
1032   PetscErrorCode ierr;
1033   PetscInt       i;
1034 
1035   PetscFunctionBegin;
1036   ierr = PetscFree(*procs);CHKERRQ(ierr);
1037   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1038   if (*indices) {
1039     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1040     for (i=1; i<*nproc; i++) {
1041       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1042     }
1043     ierr = PetscFree(*indices);CHKERRQ(ierr);
1044   }
1045   PetscFunctionReturn(0);
1046 }
1047 
1048 #undef __FUNCT__
1049 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1050 /*@C
1051    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1052 
1053    Not Collective
1054 
1055    Input Arguments:
1056 . ltog - local to global mapping
1057 
1058    Output Arguments:
1059 . array - array of indices
1060 
1061    Level: advanced
1062 
1063 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1064 @*/
1065 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1066 {
1067   PetscFunctionBegin;
1068   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1069   PetscValidPointer(array,2);
1070   *array = ltog->indices;
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1076 /*@C
1077    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1078 
1079    Not Collective
1080 
1081    Input Arguments:
1082 + ltog - local to global mapping
1083 - array - array of indices
1084 
1085    Level: advanced
1086 
1087 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1088 @*/
1089 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1090 {
1091   PetscFunctionBegin;
1092   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1093   PetscValidPointer(array,2);
1094   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1095   *array = NULL;
1096   PetscFunctionReturn(0);
1097 }
1098 
1099 #undef __FUNCT__
1100 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1101 /*@C
1102    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1103 
1104    Not Collective
1105 
1106    Input Arguments:
1107 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1108 . n - number of mappings to concatenate
1109 - ltogs - local to global mappings
1110 
1111    Output Arguments:
1112 . ltogcat - new mapping
1113 
1114    Level: advanced
1115 
1116 .seealso: ISLocalToGlobalMappingCreate()
1117 @*/
1118 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1119 {
1120   PetscInt       i,cnt,m,*idx;
1121   PetscErrorCode ierr;
1122 
1123   PetscFunctionBegin;
1124   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1125   if (n > 0) PetscValidPointer(ltogs,3);
1126   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1127   PetscValidPointer(ltogcat,4);
1128   for (cnt=0,i=0; i<n; i++) {
1129     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1130     cnt += m;
1131   }
1132   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1133   for (cnt=0,i=0; i<n; i++) {
1134     const PetscInt *subidx;
1135     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1136     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1137     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1138     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1139     cnt += m;
1140   }
1141   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 
1146