xref: /petsc/src/vec/is/utils/isltog.c (revision 7c8652dd9fb051dfaf30896d504f41ba028df3ea)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "ISG2LMapApply"
/*
   ISG2LMapApply - Maps a list of global indices to local indices using the
   cached mapping->globals table, masking entries with no local value.

   Input Parameters:
+  mapping - the local to global mapping context
.  n       - number of indices to map
-  in      - indices in global numbering

   Output Parameter:
.  out     - indices in local numbering; negative inputs pass through
             unchanged, and global indices outside [globalstart,globalend]
             are replaced by -1

   Note: internal fast path equivalent to ISGlobalToLocalMappingApply()
   with IS_GTOLM_MASK (compare the mask branch of that routine below).
*/
PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
{
  PetscErrorCode ierr;
  PetscInt       i,start,end;

  PetscFunctionBegin;
  if (!mapping->globals) {
    /* a zero-length IS_GTOLM_MASK apply is used solely to trigger creation of mapping->globals */
    ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
  }
  start = mapping->globalstart;
  end = mapping->globalend;
  for (i=0; i<n; i++) {
    if (in[i] < 0)          out[i] = in[i];  /* negative (masked) indices pass through */
    else if (in[i] < start) out[i] = -1;     /* below the owned global range: no local value */
    else if (in[i] > end)   out[i] = -1;     /* above the owned global range: no local value */
    else                    out[i] = mapping->globals[in[i] - start];
  }
  PetscFunctionReturn(0);
}
29 
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
33 /*@C
34     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
35 
36     Not Collective
37 
38     Input Parameter:
39 .   ltog - local to global mapping
40 
41     Output Parameter:
42 .   n - the number of entries in the local mapping
43 
44     Level: advanced
45 
46     Concepts: mapping^local to global
47 
48 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
49 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* n is the number of local entries stored in the mapping (set at creation) */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
58 
59 #undef __FUNCT__
60 #define __FUNCT__ "ISLocalToGlobalMappingView"
61 /*@C
62     ISLocalToGlobalMappingView - View a local to global mapping
63 
64     Not Collective
65 
66     Input Parameters:
67 +   ltog - local to global mapping
68 -   viewer - viewer
69 
70     Level: advanced
71 
72     Concepts: mapping^local to global
73 
74 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
75 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* default to stdout on the mapping's communicator when no viewer is supplied */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* print one "[rank] local global" line per local index; synchronized
       printing must be enabled before, and flushed and disabled after */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
102 
103 #undef __FUNCT__
104 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
105 /*@
106     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
107     ordering and a global parallel ordering.
108 
109     Not collective
110 
111     Input Parameter:
112 .   is - index set containing the global numbers for each local number
113 
114     Output Parameter:
115 .   mapping - new mapping data structure
116 
117     Level: advanced
118 
119     Concepts: mapping^local to global
120 
121 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
122 @*/
123 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
124 {
125   PetscErrorCode ierr;
126   PetscInt       n,bs;
127   const PetscInt *indices;
128   MPI_Comm       comm;
129   PetscBool      isblock;
130 
131   PetscFunctionBegin;
132   PetscValidHeaderSpecific(is,IS_CLASSID,1);
133   PetscValidPointer(mapping,2);
134 
135   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
136   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
137   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
138   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
139   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
140   if (!isblock || bs == 1) {
141     ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
142     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
143   } else {
144     ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
145   }
146   PetscFunctionReturn(0);
147 }
148 
149 #undef __FUNCT__
150 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
151 /*@C
152     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
153     ordering and a global parallel ordering.
154 
155     Collective
156 
157     Input Parameters:
158 +   sf - star forest mapping contiguous local indices to (rank, offset)
159 -   start - first global index on this process
160 
161     Output Parameter:
162 .   mapping - new mapping data structure
163 
164     Level: advanced
165 
166     Concepts: mapping^local to global
167 
168 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
169 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (ilocal) {
    /* explicit leaf locations: the local space must cover the largest leaf index */
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;  /* NULL ilocal means leaves are contiguous 0..nleaves-1 */
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  /* each root r owns global index start+r */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* local slots not touched by any leaf keep the "no global value" marker -1 */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  /* broadcast each root's global index to all leaves referencing it */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership transfers to the mapping; only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
197 
198 #undef __FUNCT__
199 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
200 /*@
201     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
202     ordering and a global parallel ordering.
203 
204     Not Collective, but communicator may have more than one process
205 
206     Input Parameters:
207 +   comm - MPI communicator
208 .   n - the number of local elements
209 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
210 -   mode - see PetscCopyMode
211 
212     Output Parameter:
213 .   mapping - new mapping data structure
214 
215     Level: advanced
216 
217     Concepts: mapping^local to global
218 
219 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
220 @*/
221 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
222 {
223   PetscErrorCode ierr;
224   PetscInt       *in;
225 
226   PetscFunctionBegin;
227   if (n) PetscValidIntPointer(indices,3);
228   PetscValidPointer(mapping,4);
229 
230   *mapping = NULL;
231   ierr = ISInitializePackage();CHKERRQ(ierr);
232 
233   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
234                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
235   (*mapping)->n = n;
236   /*
237     Do not create the global to local mapping. This is only created if
238     ISGlobalToLocalMapping() is called
239   */
240   (*mapping)->globals = 0;
241   if (mode == PETSC_COPY_VALUES) {
242     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
243     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
244     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
245     (*mapping)->indices = in;
246   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
247   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
248   PetscFunctionReturn(0);
249 }
250 
251 #undef __FUNCT__
252 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
253 /*@
254     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
255        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
256        and VecSetLocalToGlobalMappingBlock().
257 
258     Not Collective, but communicator may have more than one process
259 
260     Input Parameters:
261 +    inmap - original point-wise mapping
262 -    bs - block size
263 
264     Output Parameter:
265 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
266 
267     Level: advanced
268 
269     Concepts: mapping^local to global
270 
271 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
272 @*/
273 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
274 {
275   PetscErrorCode ierr;
276   PetscInt       *ii,i,n;
277 
278   PetscFunctionBegin;
279   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
280   PetscValidPointer(outmap,3);
281   if (bs > 1) {
282     n = inmap->n/bs;
283     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
284     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
285     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
286     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
287   } else {
288     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
289     *outmap = inmap;
290   }
291   PetscFunctionReturn(0);
292 }
293 
294 #undef __FUNCT__
295 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
296 /*@
297     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
298        ISLocalToGlobalMapping
299 
300     Not Collective, but communicator may have more than one process
301 
302     Input Parameters:
303 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
304 - bs - block size
305 
306     Output Parameter:
307 .   outmap - pointwise mapping
308 
309     Level: advanced
310 
311     Concepts: mapping^local to global
312 
313 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
314 @*/
315 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
316 {
317   PetscErrorCode ierr;
318   PetscInt       *ii,i,n;
319 
320   PetscFunctionBegin;
321   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
322   PetscValidPointer(outmap,2);
323   if (bs > 1) {
324     n    = inmap->n*bs;
325     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
326     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
327     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
328   } else {
329     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
330     *outmap = inmap;
331   }
332   PetscFunctionReturn(0);
333 }
334 
335 #undef __FUNCT__
336 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
337 /*@
338    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
339    ordering and a global parallel ordering.
340 
341    Not Collective
342 
343    Input Parameters:
344 .  mapping - mapping data structure
345 
346    Level: advanced
347 
348 .seealso: ISLocalToGlobalMappingCreate()
349 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);  /* destroying a NULL mapping is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  /* indices may be caller-provided (PETSC_OWN_POINTER) or a copy; either way the
     mapping owns it now. globals is NULL unless the global-to-local table was built. */
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;  /* defend callers against use-after-destroy */
  PetscFunctionReturn(0);
}
364 
365 #undef __FUNCT__
366 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
367 /*@
368     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
369     a new index set using the global numbering defined in an ISLocalToGlobalMapping
370     context.
371 
372     Not collective
373 
374     Input Parameters:
375 +   mapping - mapping between local and global numbering
376 -   is - index set in local numbering
377 
378     Output Parameters:
379 .   newis - index set in global numbering
380 
381     Level: advanced
382 
383     Concepts: mapping^local to global
384 
385 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
386           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
387 @*/
388 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
389 {
390   PetscErrorCode ierr;
391   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
392   const PetscInt *idxin;
393 
394   PetscFunctionBegin;
395   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
396   PetscValidHeaderSpecific(is,IS_CLASSID,2);
397   PetscValidPointer(newis,3);
398 
399   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
400   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
401   idxmap = mapping->indices;
402 
403   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
404   for (i=0; i<n; i++) {
405     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
406     idxout[i] = idxmap[idxin[i]];
407   }
408   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
409   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
410   PetscFunctionReturn(0);
411 }
412 
413 #undef __FUNCT__
414 #define __FUNCT__ "ISLocalToGlobalMappingApply"
415 /*@
416    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
417    and converts them to the global numbering.
418 
419    Not collective
420 
421    Input Parameters:
422 +  mapping - the local to global mapping context
423 .  N - number of integers
424 -  in - input indices in local numbering
425 
426    Output Parameter:
427 .  out - indices in global numbering
428 
429    Notes:
430    The in and out array parameters may be identical.
431 
432    Level: advanced
433 
434 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
435           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
436           AOPetscToApplication(), ISGlobalToLocalMappingApply()
437 
438     Concepts: mapping^local to global
439 @*/
440 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
441 {
442   PetscInt       i,Nmax = mapping->n;
443   const PetscInt *idx = mapping->indices;
444 
445   PetscFunctionBegin;
446   for (i=0; i<N; i++) {
447     if (in[i] < 0) {
448       out[i] = in[i];
449       continue;
450     }
451     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
452     out[i] = idx[in[i]];
453   }
454   PetscFunctionReturn(0);
455 }
456 
457 /* -----------------------------------------------------------------------------------------*/
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
461 /*
462     Creates the global fields in the ISLocalToGlobalMapping structure
463 */
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    the inclusive owned range [globalstart,globalend] and the dense lookup
    table globals[], where globals[g - globalstart] is the local index for
    global index g, or -1 if g has no local value. Memory use is
    O(globalend - globalstart), not scalable in the global range.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min/max over the nonnegative indices only */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative index at all: collapse to the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* end-start+2 slots: one per global index in range, plus one extra —
     presumably to avoid a zero-size allocation for the empty range (TODO confirm) */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;  /* -1 marks "no local value" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    /* later occurrences of a duplicate global index overwrite earlier ones */
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
493 
494 #undef __FUNCT__
495 #define __FUNCT__ "ISGlobalToLocalMappingApply"
496 /*@
497     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
498     specified with a global numbering.
499 
500     Not collective
501 
502     Input Parameters:
503 +   mapping - mapping between local and global numbering
504 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
505            IS_GTOLM_DROP - drops the indices with no local value from the output list
506 .   n - number of global indices to map
507 -   idx - global indices to map
508 
509     Output Parameters:
510 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
511 -   idxout - local index of each global index, one must pass in an array long enough
512              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
513              idxout == NULL to determine the required length (returned in nout)
514              and then allocate the required space and call ISGlobalToLocalMappingApply()
515              a second time to set the values.
516 
517     Notes:
518     Either nout or idxout may be NULL. idx and idxout may be identical.
519 
520     This is not scalable in memory usage. Each processor requires O(Nglobal) size
521     array to compute these.
522 
523     Level: advanced
524 
525     Developer Note: The manual page states that idx and idxout may be identical but the calling
526        sequence declares idx as const so it cannot be the same as idxout.
527 
528     Concepts: mapping^global to local
529 
530 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
531           ISLocalToGlobalMappingDestroy()
532 @*/
533 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
534                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
535 {
536   PetscInt       i,*globals,nf = 0,tmp,start,end;
537   PetscErrorCode ierr;
538 
539   PetscFunctionBegin;
540   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
541   if (!mapping->globals) {
542     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
543   }
544   globals = mapping->globals;
545   start   = mapping->globalstart;
546   end     = mapping->globalend;
547 
548   if (type == IS_GTOLM_MASK) {
549     if (idxout) {
550       for (i=0; i<n; i++) {
551         if (idx[i] < 0) idxout[i] = idx[i];
552         else if (idx[i] < start) idxout[i] = -1;
553         else if (idx[i] > end)   idxout[i] = -1;
554         else                     idxout[i] = globals[idx[i] - start];
555       }
556     }
557     if (nout) *nout = n;
558   } else {
559     if (idxout) {
560       for (i=0; i<n; i++) {
561         if (idx[i] < 0) continue;
562         if (idx[i] < start) continue;
563         if (idx[i] > end) continue;
564         tmp = globals[idx[i] - start];
565         if (tmp < 0) continue;
566         idxout[nf++] = tmp;
567       }
568     } else {
569       for (i=0; i<n; i++) {
570         if (idx[i] < 0) continue;
571         if (idx[i] < start) continue;
572         if (idx[i] > end) continue;
573         tmp = globals[idx[i] - start];
574         if (tmp < 0) continue;
575         nf++;
576       }
577     }
578     if (nout) *nout = nf;
579   }
580   PetscFunctionReturn(0);
581 }
582 
583 #undef __FUNCT__
584 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
585 /*@C
586     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
587      each index shared by more than one processor
588 
589     Collective on ISLocalToGlobalMapping
590 
591     Input Parameters:
592 .   mapping - the mapping from local to global indexing
593 
594     Output Parameter:
595 +   nproc - number of processors that are connected to this one
596 .   proc - neighboring processors
597 .   numproc - number of indices for each subdomain (processor)
598 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
599 
600     Level: advanced
601 
602     Concepts: mapping^local to global
603 
604     Fortran Usage:
605 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
606 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
607           PetscInt indices[nproc][numprocmax],ierr)
608         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
609         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
610 
611 
612 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
613           ISLocalToGlobalMappingRestoreInfo()
614 @*/
615 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
616 {
617   PetscErrorCode ierr;
618   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
619   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
620   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
621   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
622   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
623   PetscInt       first_procs,first_numprocs,*first_indices;
624   MPI_Request    *recv_waits,*send_waits;
625   MPI_Status     recv_status,*send_status,*recv_statuses;
626   MPI_Comm       comm;
627   PetscBool      debug = PETSC_FALSE;
628 
629   PetscFunctionBegin;
630   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
631   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
632   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
633   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
634   if (size == 1) {
635     *nproc         = 0;
636     *procs         = NULL;
637     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
638     (*numprocs)[0] = 0;
639     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
640     (*indices)[0]  = NULL;
641     PetscFunctionReturn(0);
642   }
643 
644   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
645 
646   /*
647     Notes on ISLocalToGlobalMappingGetInfo
648 
649     globally owned node - the nodes that have been assigned to this processor in global
650            numbering, just for this routine.
651 
652     nontrivial globally owned node - node assigned to this processor that is on a subdomain
653            boundary (i.e. is has more than one local owner)
654 
655     locally owned node - node that exists on this processors subdomain
656 
657     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
658            local subdomain
659   */
660   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
661   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
662   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
663 
664   for (i=0; i<n; i++) {
665     if (lindices[i] > max) max = lindices[i];
666   }
667   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
668   Ng++;
669   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
670   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
671   scale  = Ng/size + 1;
672   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
673   rstart = scale*rank;
674 
675   /* determine ownership ranges of global indices */
676   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
677   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
678 
679   /* determine owners of each local node  */
680   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
681   for (i=0; i<n; i++) {
682     proc             = lindices[i]/scale; /* processor that globally owns this index */
683     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
684     owner[i]         = proc;
685     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
686   }
687   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
688   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
689 
690   /* inform other processors of number of messages and max length*/
691   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
692   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
693 
694   /* post receives for owned rows */
695   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
696   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
697   for (i=0; i<nrecvs; i++) {
698     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
699   }
700 
701   /* pack messages containing lists of local nodes to owners */
702   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
703   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
704   starts[0] = 0;
705   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
706   for (i=0; i<n; i++) {
707     sends[starts[owner[i]]++] = lindices[i];
708     sends[starts[owner[i]]++] = i;
709   }
710   ierr = PetscFree(owner);CHKERRQ(ierr);
711   starts[0] = 0;
712   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
713 
714   /* send the messages */
715   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
716   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
717   cnt = 0;
718   for (i=0; i<size; i++) {
719     if (nprocs[2*i]) {
720       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
721       dest[cnt] = i;
722       cnt++;
723     }
724   }
725   ierr = PetscFree(starts);CHKERRQ(ierr);
726 
727   /* wait on receives */
728   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
729   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
730   cnt  = nrecvs;
731   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
732   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
733   while (cnt) {
734     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
735     /* unpack receives into our local space */
736     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
737     source[imdex] = recv_status.MPI_SOURCE;
738     len[imdex]    = len[imdex]/2;
739     /* count how many local owners for each of my global owned indices */
740     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
741     cnt--;
742   }
743   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
744 
745   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
746   nowned  = 0;
747   nownedm = 0;
748   for (i=0; i<ng; i++) {
749     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
750   }
751 
752   /* create single array to contain rank of all local owners of each globally owned index */
753   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
754   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
755   starts[0] = 0;
756   for (i=1; i<ng; i++) {
757     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
758     else starts[i] = starts[i-1];
759   }
760 
761   /* for each nontrival globally owned node list all arriving processors */
762   for (i=0; i<nrecvs; i++) {
763     for (j=0; j<len[i]; j++) {
764       node = recvs[2*i*nmax+2*j]-rstart;
765       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
766     }
767   }
768 
769   if (debug) { /* -----------------------------------  */
770     starts[0] = 0;
771     for (i=1; i<ng; i++) {
772       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
773       else starts[i] = starts[i-1];
774     }
775     for (i=0; i<ng; i++) {
776       if (nownedsenders[i] > 1) {
777         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
778         for (j=0; j<nownedsenders[i]; j++) {
779           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
780         }
781         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
782       }
783     }
784     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
785   } /* -----------------------------------  */
786 
787   /* wait on original sends */
788   if (nsends) {
789     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
790     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
791     ierr = PetscFree(send_status);CHKERRQ(ierr);
792   }
793   ierr = PetscFree(send_waits);CHKERRQ(ierr);
794   ierr = PetscFree(sends);CHKERRQ(ierr);
795   ierr = PetscFree(nprocs);CHKERRQ(ierr);
796 
797   /* pack messages to send back to local owners */
798   starts[0] = 0;
799   for (i=1; i<ng; i++) {
800     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
801     else starts[i] = starts[i-1];
802   }
803   nsends2 = nrecvs;
804   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
805   for (i=0; i<nrecvs; i++) {
806     nprocs[i] = 1;
807     for (j=0; j<len[i]; j++) {
808       node = recvs[2*i*nmax+2*j]-rstart;
809       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
810     }
811   }
812   nt = 0;
813   for (i=0; i<nsends2; i++) nt += nprocs[i];
814 
815   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
816   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
817 
818   starts2[0] = 0;
819   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
820   /*
821      Each message is 1 + nprocs[i] long, and consists of
822        (0) the number of nodes being sent back
823        (1) the local node number,
824        (2) the number of processors sharing it,
825        (3) the processors sharing it
826   */
827   for (i=0; i<nsends2; i++) {
828     cnt = 1;
829     sends2[starts2[i]] = 0;
830     for (j=0; j<len[i]; j++) {
831       node = recvs[2*i*nmax+2*j]-rstart;
832       if (nownedsenders[node] > 1) {
833         sends2[starts2[i]]++;
834         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
835         sends2[starts2[i]+cnt++] = nownedsenders[node];
836         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
837         cnt += nownedsenders[node];
838       }
839     }
840   }
841 
842   /* receive the message lengths */
843   nrecvs2 = nsends;
844   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
845   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
846   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
847   for (i=0; i<nrecvs2; i++) {
848     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
849   }
850 
851   /* send the message lengths */
852   for (i=0; i<nsends2; i++) {
853     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
854   }
855 
856   /* wait on receives of lens */
857   if (nrecvs2) {
858     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
859     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
860     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
861   }
862   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
863 
864   starts3[0] = 0;
865   nt         = 0;
866   for (i=0; i<nrecvs2-1; i++) {
867     starts3[i+1] = starts3[i] + lens2[i];
868     nt          += lens2[i];
869   }
870   if (nrecvs2) nt += lens2[nrecvs2-1];
871 
872   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
873   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
874   for (i=0; i<nrecvs2; i++) {
875     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
876   }
877 
878   /* send the messages */
879   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
880   for (i=0; i<nsends2; i++) {
881     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
882   }
883 
884   /* wait on receives */
885   if (nrecvs2) {
886     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
887     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
888     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
889   }
890   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
891   ierr = PetscFree(nprocs);CHKERRQ(ierr);
892 
893   if (debug) { /* -----------------------------------  */
894     cnt = 0;
895     for (i=0; i<nrecvs2; i++) {
896       nt = recvs2[cnt++];
897       for (j=0; j<nt; j++) {
898         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
899         for (k=0; k<recvs2[cnt+1]; k++) {
900           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
901         }
902         cnt += 2 + recvs2[cnt+1];
903         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
904       }
905     }
906     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
907   } /* -----------------------------------  */
908 
909   /* count number subdomains for each local node */
910   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
911   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
912   cnt  = 0;
913   for (i=0; i<nrecvs2; i++) {
914     nt = recvs2[cnt++];
915     for (j=0; j<nt; j++) {
916       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
917       cnt += 2 + recvs2[cnt+1];
918     }
919   }
920   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
921   *nproc    = nt;
922   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
923   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
924   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
925   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
926   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
927   cnt       = 0;
928   for (i=0; i<size; i++) {
929     if (nprocs[i] > 0) {
930       bprocs[i]        = cnt;
931       (*procs)[cnt]    = i;
932       (*numprocs)[cnt] = nprocs[i];
933       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
934       cnt++;
935     }
936   }
937 
938   /* make the list of subdomains for each nontrivial local node */
939   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
940   cnt  = 0;
941   for (i=0; i<nrecvs2; i++) {
942     nt = recvs2[cnt++];
943     for (j=0; j<nt; j++) {
944       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
945       cnt += 2 + recvs2[cnt+1];
946     }
947   }
948   ierr = PetscFree(bprocs);CHKERRQ(ierr);
949   ierr = PetscFree(recvs2);CHKERRQ(ierr);
950 
951   /* sort the node indexing by their global numbers */
952   nt = *nproc;
953   for (i=0; i<nt; i++) {
954     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
955     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
956     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
957     ierr = PetscFree(tmp);CHKERRQ(ierr);
958   }
959 
960   if (debug) { /* -----------------------------------  */
961     nt = *nproc;
962     for (i=0; i<nt; i++) {
963       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
964       for (j=0; j<(*numprocs)[i]; j++) {
965         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
966       }
967       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
968     }
969     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
970   } /* -----------------------------------  */
971 
972   /* wait on sends */
973   if (nsends2) {
974     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
975     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
976     ierr = PetscFree(send_status);CHKERRQ(ierr);
977   }
978 
979   ierr = PetscFree(starts3);CHKERRQ(ierr);
980   ierr = PetscFree(dest);CHKERRQ(ierr);
981   ierr = PetscFree(send_waits);CHKERRQ(ierr);
982 
983   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
984   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
985   ierr = PetscFree(starts);CHKERRQ(ierr);
986   ierr = PetscFree(starts2);CHKERRQ(ierr);
987   ierr = PetscFree(lens2);CHKERRQ(ierr);
988 
989   ierr = PetscFree(source);CHKERRQ(ierr);
990   ierr = PetscFree(len);CHKERRQ(ierr);
991   ierr = PetscFree(recvs);CHKERRQ(ierr);
992   ierr = PetscFree(nprocs);CHKERRQ(ierr);
993   ierr = PetscFree(sends2);CHKERRQ(ierr);
994 
995   /* put the information about myself as the first entry in the list */
996   first_procs    = (*procs)[0];
997   first_numprocs = (*numprocs)[0];
998   first_indices  = (*indices)[0];
999   for (i=0; i<*nproc; i++) {
1000     if ((*procs)[i] == rank) {
1001       (*procs)[0]    = (*procs)[i];
1002       (*numprocs)[0] = (*numprocs)[i];
1003       (*indices)[0]  = (*indices)[i];
1004       (*procs)[i]    = first_procs;
1005       (*numprocs)[i] = first_numprocs;
1006       (*indices)[i]  = first_indices;
1007       break;
1008     }
1009   }
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1015 /*@C
1016     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1017 
1018     Collective on ISLocalToGlobalMapping
1019 
1020     Input Parameter:
1021 .   mapping - the mapping from local to global indexing
1022 
1023     Output Parameters:
1024 +   nproc - number of processors that are connected to this one
1025 .   procs - neighboring processors
1026 .   numprocs - number of indices for each processor
1027 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1028 
1029     Level: advanced
1030 
1031 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1032           ISLocalToGlobalMappingGetInfo()
1033 @*/
1034 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1035 {
1036   PetscErrorCode ierr;
1037   PetscInt       i;
1038 
1039   PetscFunctionBegin;
1040   ierr = PetscFree(*procs);CHKERRQ(ierr);
1041   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1042   if (*indices) {
1043     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1044     for (i=1; i<*nproc; i++) {
1045       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1046     }
1047     ierr = PetscFree(*indices);CHKERRQ(ierr);
1048   }
1049   PetscFunctionReturn(0);
1050 }
1051 
1052 #undef __FUNCT__
1053 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1054 /*@C
1055    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1056 
1057    Not Collective
1058 
1059    Input Arguments:
1060 . ltog - local to global mapping
1061 
1062    Output Arguments:
1063 . array - array of indices
1064 
1065    Level: advanced
1066 
1067 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1068 @*/
1069 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1070 {
1071   PetscFunctionBegin;
1072   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1073   PetscValidPointer(array,2);
1074   *array = ltog->indices;
1075   PetscFunctionReturn(0);
1076 }
1077 
1078 #undef __FUNCT__
1079 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1080 /*@C
1081    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1082 
1083    Not Collective
1084 
1085    Input Arguments:
1086 + ltog - local to global mapping
1087 - array - array of indices
1088 
1089    Level: advanced
1090 
1091 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1092 @*/
1093 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1094 {
1095   PetscFunctionBegin;
1096   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1097   PetscValidPointer(array,2);
1098   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1099   *array = NULL;
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 #undef __FUNCT__
1104 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1105 /*@C
1106    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1107 
1108    Not Collective
1109 
1110    Input Arguments:
1111 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1112 . n - number of mappings to concatenate
1113 - ltogs - local to global mappings
1114 
1115    Output Arguments:
1116 . ltogcat - new mapping
1117 
1118    Level: advanced
1119 
1120 .seealso: ISLocalToGlobalMappingCreate()
1121 @*/
1122 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1123 {
1124   PetscInt       i,cnt,m,*idx;
1125   PetscErrorCode ierr;
1126 
1127   PetscFunctionBegin;
1128   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1129   if (n > 0) PetscValidPointer(ltogs,3);
1130   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1131   PetscValidPointer(ltogcat,4);
1132   for (cnt=0,i=0; i<n; i++) {
1133     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1134     cnt += m;
1135   }
1136   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1137   for (cnt=0,i=0; i<n; i++) {
1138     const PetscInt *subidx;
1139     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1140     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1141     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1142     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1143     cnt += m;
1144   }
1145   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1146   PetscFunctionReturn(0);
1147 }
1148