xref: /petsc/src/vec/is/utils/isltog.c (revision 98c3331e5c81c4bfa5036a9b6bc521ec2d439166)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* the local size is stored directly on the mapping when it is created */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Level: advanced
120 
121     Concepts: mapping^local to global
122 
123 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
124 @*/
125 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
126 {
127   PetscErrorCode ierr;
128   PetscInt       n,bs;
129   const PetscInt *indices;
130   MPI_Comm       comm;
131   PetscBool      isblock;
132 
133   PetscFunctionBegin;
134   PetscValidHeaderSpecific(is,IS_CLASSID,1);
135   PetscValidPointer(mapping,2);
136 
137   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
138   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
139   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
142   if (!isblock || bs == 1) {
143     ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
144     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
145   } else {
146     ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
147   }
148   PetscFunctionReturn(0);
149 }
150 
151 #undef __FUNCT__
152 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
153 /*@C
154     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
155     ordering and a global parallel ordering.
156 
157     Collective
158 
159     Input Parameter:
160 +   sf - star forest mapping contiguous local indices to (rank, offset)
161 -   start - first global index on this process
162 
163     Output Parameter:
164 .   mapping - new mapping data structure
165 
166     Level: advanced
167 
168     Concepts: mapping^local to global
169 
170 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
171 @*/
172 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
173 {
174   PetscErrorCode ierr;
175   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
176   const PetscInt *ilocal;
177   MPI_Comm       comm;
178 
179   PetscFunctionBegin;
180   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
181   PetscValidPointer(mapping,3);
182 
183   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
184   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
185   if (ilocal) {
186     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
187   }
188   else maxlocal = nleaves;
189   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
190   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
191   for (i=0; i<nroots; i++) globals[i] = start + i;
192   for (i=0; i<maxlocal; i++) ltog[i] = -1;
193   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
194   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
195   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
196   ierr = PetscFree(globals);CHKERRQ(ierr);
197   PetscFunctionReturn(0);
198 }
199 
200 #undef __FUNCT__
201 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
202 /*@
203     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
204     ordering and a global parallel ordering.
205 
206     Not Collective, but communicator may have more than one process
207 
208     Input Parameters:
209 +   comm - MPI communicator
210 .   n - the number of local elements
211 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
212 -   mode - see PetscCopyMode
213 
214     Output Parameter:
215 .   mapping - new mapping data structure
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
224 {
225   PetscErrorCode ierr;
226   PetscInt       *in;
227 
228   PetscFunctionBegin;
229   if (n) PetscValidIntPointer(indices,3);
230   PetscValidPointer(mapping,4);
231 
232   *mapping = NULL;
233   ierr = ISInitializePackage();CHKERRQ(ierr);
234 
235   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
236                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
237   (*mapping)->n = n;
238   /*
239     Do not create the global to local mapping. This is only created if
240     ISGlobalToLocalMapping() is called
241   */
242   (*mapping)->globals = 0;
243   if (mode == PETSC_COPY_VALUES) {
244     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
245     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
246     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
247     (*mapping)->indices = in;
248   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
249   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
250   PetscFunctionReturn(0);
251 }
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
255 /*@
256     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
257        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
258        and VecSetLocalToGlobalMappingBlock().
259 
260     Not Collective, but communicator may have more than one process
261 
262     Input Parameters:
263 +    inmap - original point-wise mapping
264 -    bs - block size
265 
266     Output Parameter:
267 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
268 
269     Level: advanced
270 
271     Concepts: mapping^local to global
272 
273 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
274 @*/
275 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
276 {
277   PetscErrorCode ierr;
278   PetscInt       *ii,i,n;
279 
280   PetscFunctionBegin;
281   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
282   PetscValidPointer(outmap,3);
283   if (bs > 1) {
284     n = inmap->n/bs;
285     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
286     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
287     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
288     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
289   } else {
290     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
291     *outmap = inmap;
292   }
293   PetscFunctionReturn(0);
294 }
295 
296 #undef __FUNCT__
297 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
298 /*@
299     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
300        ISLocalToGlobalMapping
301 
302     Not Collective, but communicator may have more than one process
303 
304     Input Parameter:
305 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
306 - bs - block size
307 
308     Output Parameter:
309 .   outmap - pointwise mapping
310 
311     Level: advanced
312 
313     Concepts: mapping^local to global
314 
315 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
316 @*/
317 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
318 {
319   PetscErrorCode ierr;
320   PetscInt       *ii,i,n;
321 
322   PetscFunctionBegin;
323   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
324   PetscValidPointer(outmap,2);
325   if (bs > 1) {
326     n    = inmap->n*bs;
327     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
328     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
329     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
330   } else {
331     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
332     *outmap = inmap;
333   }
334   PetscFunctionReturn(0);
335 }
336 
337 #undef __FUNCT__
338 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
339 /*@
340    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
341    ordering and a global parallel ordering.
342 
343    Note Collective
344 
345    Input Parameters:
346 .  mapping - mapping data structure
347 
348    Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingCreate()
351 @*/
352 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
353 {
354   PetscErrorCode ierr;
355 
356   PetscFunctionBegin;
357   if (!*mapping) PetscFunctionReturn(0);
358   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
359   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
360   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
361   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
362   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
363   *mapping = 0;
364   PetscFunctionReturn(0);
365 }
366 
367 #undef __FUNCT__
368 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
369 /*@
370     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
371     a new index set using the global numbering defined in an ISLocalToGlobalMapping
372     context.
373 
374     Not collective
375 
376     Input Parameters:
377 +   mapping - mapping between local and global numbering
378 -   is - index set in local numbering
379 
380     Output Parameters:
381 .   newis - index set in global numbering
382 
383     Level: advanced
384 
385     Concepts: mapping^local to global
386 
387 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
388           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
389 @*/
390 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
391 {
392   PetscErrorCode ierr;
393   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
394   const PetscInt *idxin;
395 
396   PetscFunctionBegin;
397   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
398   PetscValidHeaderSpecific(is,IS_CLASSID,2);
399   PetscValidPointer(newis,3);
400 
401   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
402   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
403   idxmap = mapping->indices;
404 
405   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
406   for (i=0; i<n; i++) {
407     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
408     idxout[i] = idxmap[idxin[i]];
409   }
410   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
411   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
412   PetscFunctionReturn(0);
413 }
414 
415 #undef __FUNCT__
416 #define __FUNCT__ "ISLocalToGlobalMappingApply"
417 /*@
418    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
419    and converts them to the global numbering.
420 
421    Not collective
422 
423    Input Parameters:
424 +  mapping - the local to global mapping context
425 .  N - number of integers
426 -  in - input indices in local numbering
427 
428    Output Parameter:
429 .  out - indices in global numbering
430 
431    Notes:
432    The in and out array parameters may be identical.
433 
434    Level: advanced
435 
436 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
437           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
438           AOPetscToApplication(), ISGlobalToLocalMappingApply()
439 
440     Concepts: mapping^local to global
441 @*/
442 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
443 {
444   PetscInt       i,Nmax = mapping->n;
445   const PetscInt *idx = mapping->indices;
446 
447   PetscFunctionBegin;
448   for (i=0; i<N; i++) {
449     if (in[i] < 0) {
450       out[i] = in[i];
451       continue;
452     }
453     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
454     out[i] = idx[in[i]];
455   }
456   PetscFunctionReturn(0);
457 }
458 
459 /* -----------------------------------------------------------------------------------------*/
460 
461 #undef __FUNCT__
462 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
463 /*
464     Creates the global fields in the ISLocalToGlobalMapping structure
465 */
466 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
467 {
468   PetscErrorCode ierr;
469   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
470 
471   PetscFunctionBegin;
472   end   = 0;
473   start = PETSC_MAX_INT;
474 
475   for (i=0; i<n; i++) {
476     if (idx[i] < 0) continue;
477     if (idx[i] < start) start = idx[i];
478     if (idx[i] > end)   end   = idx[i];
479   }
480   if (start > end) {start = 0; end = -1;}
481   mapping->globalstart = start;
482   mapping->globalend   = end;
483 
484   ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
485   mapping->globals = globals;
486   for (i=0; i<end-start+1; i++) globals[i] = -1;
487   for (i=0; i<n; i++) {
488     if (idx[i] < 0) continue;
489     globals[idx[i] - start] = i;
490   }
491 
492   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
493   PetscFunctionReturn(0);
494 }
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "ISGlobalToLocalMappingApply"
498 /*@
499     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
500     specified with a global numbering.
501 
502     Not collective
503 
504     Input Parameters:
505 +   mapping - mapping between local and global numbering
506 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
507            IS_GTOLM_DROP - drops the indices with no local value from the output list
508 .   n - number of global indices to map
509 -   idx - global indices to map
510 
511     Output Parameters:
512 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
513 -   idxout - local index of each global index, one must pass in an array long enough
514              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
515              idxout == NULL to determine the required length (returned in nout)
516              and then allocate the required space and call ISGlobalToLocalMappingApply()
517              a second time to set the values.
518 
519     Notes:
520     Either nout or idxout may be NULL. idx and idxout may be identical.
521 
522     This is not scalable in memory usage. Each processor requires O(Nglobal) size
523     array to compute these.
524 
525     Level: advanced
526 
527     Developer Note: The manual page states that idx and idxout may be identical but the calling
528        sequence declares idx as const so it cannot be the same as idxout.
529 
530     Concepts: mapping^global to local
531 
532 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
533           ISLocalToGlobalMappingDestroy()
534 @*/
535 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
536                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
537 {
538   PetscInt       i,*globals,nf = 0,tmp,start,end;
539   PetscErrorCode ierr;
540 
541   PetscFunctionBegin;
542   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
543   if (!mapping->globals) {
544     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
545   }
546   globals = mapping->globals;
547   start   = mapping->globalstart;
548   end     = mapping->globalend;
549 
550   if (type == IS_GTOLM_MASK) {
551     if (idxout) {
552       for (i=0; i<n; i++) {
553         if (idx[i] < 0) idxout[i] = idx[i];
554         else if (idx[i] < start) idxout[i] = -1;
555         else if (idx[i] > end)   idxout[i] = -1;
556         else                     idxout[i] = globals[idx[i] - start];
557       }
558     }
559     if (nout) *nout = n;
560   } else {
561     if (idxout) {
562       for (i=0; i<n; i++) {
563         if (idx[i] < 0) continue;
564         if (idx[i] < start) continue;
565         if (idx[i] > end) continue;
566         tmp = globals[idx[i] - start];
567         if (tmp < 0) continue;
568         idxout[nf++] = tmp;
569       }
570     } else {
571       for (i=0; i<n; i++) {
572         if (idx[i] < 0) continue;
573         if (idx[i] < start) continue;
574         if (idx[i] > end) continue;
575         tmp = globals[idx[i] - start];
576         if (tmp < 0) continue;
577         nf++;
578       }
579     }
580     if (nout) *nout = nf;
581   }
582   PetscFunctionReturn(0);
583 }
584 
585 #undef __FUNCT__
586 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
587 /*@C
588     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
589      each index shared by more than one processor
590 
591     Collective on ISLocalToGlobalMapping
592 
593     Input Parameters:
594 .   mapping - the mapping from local to global indexing
595 
596     Output Parameter:
597 +   nproc - number of processors that are connected to this one
598 .   proc - neighboring processors
599 .   numproc - number of indices for each subdomain (processor)
600 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
601 
602     Level: advanced
603 
604     Concepts: mapping^local to global
605 
606     Fortran Usage:
607 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
608 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
609           PetscInt indices[nproc][numprocmax],ierr)
610         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
611         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
612 
613 
614 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
615           ISLocalToGlobalMappingRestoreInfo()
616 @*/
617 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
618 {
619   PetscErrorCode ierr;
620   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
621   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
622   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
623   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
624   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
625   PetscInt       first_procs,first_numprocs,*first_indices;
626   MPI_Request    *recv_waits,*send_waits;
627   MPI_Status     recv_status,*send_status,*recv_statuses;
628   MPI_Comm       comm;
629   PetscBool      debug = PETSC_FALSE;
630 
631   PetscFunctionBegin;
632   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
633   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
634   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
635   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
636   if (size == 1) {
637     *nproc         = 0;
638     *procs         = NULL;
639     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
640     (*numprocs)[0] = 0;
641     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
642     (*indices)[0]  = NULL;
643     PetscFunctionReturn(0);
644   }
645 
646   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
647 
648   /*
649     Notes on ISLocalToGlobalMappingGetInfo
650 
651     globally owned node - the nodes that have been assigned to this processor in global
652            numbering, just for this routine.
653 
654     nontrivial globally owned node - node assigned to this processor that is on a subdomain
655            boundary (i.e. is has more than one local owner)
656 
657     locally owned node - node that exists on this processors subdomain
658 
659     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
660            local subdomain
661   */
662   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
663   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
664   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
665 
666   for (i=0; i<n; i++) {
667     if (lindices[i] > max) max = lindices[i];
668   }
669   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
670   Ng++;
671   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
672   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
673   scale  = Ng/size + 1;
674   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
675   rstart = scale*rank;
676 
677   /* determine ownership ranges of global indices */
678   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
679   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
680 
681   /* determine owners of each local node  */
682   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
683   for (i=0; i<n; i++) {
684     proc             = lindices[i]/scale; /* processor that globally owns this index */
685     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
686     owner[i]         = proc;
687     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
688   }
689   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
690   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
691 
692   /* inform other processors of number of messages and max length*/
693   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
694   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
695 
696   /* post receives for owned rows */
697   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
698   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
699   for (i=0; i<nrecvs; i++) {
700     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
701   }
702 
703   /* pack messages containing lists of local nodes to owners */
704   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
705   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
706   starts[0] = 0;
707   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
708   for (i=0; i<n; i++) {
709     sends[starts[owner[i]]++] = lindices[i];
710     sends[starts[owner[i]]++] = i;
711   }
712   ierr = PetscFree(owner);CHKERRQ(ierr);
713   starts[0] = 0;
714   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
715 
716   /* send the messages */
717   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
718   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
719   cnt = 0;
720   for (i=0; i<size; i++) {
721     if (nprocs[2*i]) {
722       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
723       dest[cnt] = i;
724       cnt++;
725     }
726   }
727   ierr = PetscFree(starts);CHKERRQ(ierr);
728 
729   /* wait on receives */
730   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
731   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
732   cnt  = nrecvs;
733   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
734   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
735   while (cnt) {
736     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
737     /* unpack receives into our local space */
738     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
739     source[imdex] = recv_status.MPI_SOURCE;
740     len[imdex]    = len[imdex]/2;
741     /* count how many local owners for each of my global owned indices */
742     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
743     cnt--;
744   }
745   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
746 
747   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
748   nowned  = 0;
749   nownedm = 0;
750   for (i=0; i<ng; i++) {
751     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
752   }
753 
754   /* create single array to contain rank of all local owners of each globally owned index */
755   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
756   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
757   starts[0] = 0;
758   for (i=1; i<ng; i++) {
759     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
760     else starts[i] = starts[i-1];
761   }
762 
763   /* for each nontrival globally owned node list all arriving processors */
764   for (i=0; i<nrecvs; i++) {
765     for (j=0; j<len[i]; j++) {
766       node = recvs[2*i*nmax+2*j]-rstart;
767       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
768     }
769   }
770 
771   if (debug) { /* -----------------------------------  */
772     starts[0] = 0;
773     for (i=1; i<ng; i++) {
774       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
775       else starts[i] = starts[i-1];
776     }
777     for (i=0; i<ng; i++) {
778       if (nownedsenders[i] > 1) {
779         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
780         for (j=0; j<nownedsenders[i]; j++) {
781           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
782         }
783         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
784       }
785     }
786     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
787   } /* -----------------------------------  */
788 
789   /* wait on original sends */
790   if (nsends) {
791     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
792     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
793     ierr = PetscFree(send_status);CHKERRQ(ierr);
794   }
795   ierr = PetscFree(send_waits);CHKERRQ(ierr);
796   ierr = PetscFree(sends);CHKERRQ(ierr);
797   ierr = PetscFree(nprocs);CHKERRQ(ierr);
798 
799   /* pack messages to send back to local owners */
800   starts[0] = 0;
801   for (i=1; i<ng; i++) {
802     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
803     else starts[i] = starts[i-1];
804   }
805   nsends2 = nrecvs;
806   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
807   for (i=0; i<nrecvs; i++) {
808     nprocs[i] = 1;
809     for (j=0; j<len[i]; j++) {
810       node = recvs[2*i*nmax+2*j]-rstart;
811       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
812     }
813   }
814   nt = 0;
815   for (i=0; i<nsends2; i++) nt += nprocs[i];
816 
817   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
818   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
819 
820   starts2[0] = 0;
821   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
822   /*
823      Each message is 1 + nprocs[i] long, and consists of
824        (0) the number of nodes being sent back
825        (1) the local node number,
826        (2) the number of processors sharing it,
827        (3) the processors sharing it
828   */
829   for (i=0; i<nsends2; i++) {
830     cnt = 1;
831     sends2[starts2[i]] = 0;
832     for (j=0; j<len[i]; j++) {
833       node = recvs[2*i*nmax+2*j]-rstart;
834       if (nownedsenders[node] > 1) {
835         sends2[starts2[i]]++;
836         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
837         sends2[starts2[i]+cnt++] = nownedsenders[node];
838         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
839         cnt += nownedsenders[node];
840       }
841     }
842   }
843 
844   /* receive the message lengths */
845   nrecvs2 = nsends;
846   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
847   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
848   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
849   for (i=0; i<nrecvs2; i++) {
850     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
851   }
852 
853   /* send the message lengths */
854   for (i=0; i<nsends2; i++) {
855     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
856   }
857 
858   /* wait on receives of lens */
859   if (nrecvs2) {
860     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
861     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
862     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
863   }
864   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
865 
866   starts3[0] = 0;
867   nt         = 0;
868   for (i=0; i<nrecvs2-1; i++) {
869     starts3[i+1] = starts3[i] + lens2[i];
870     nt          += lens2[i];
871   }
872   if (nrecvs2) nt += lens2[nrecvs2-1];
873 
874   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
875   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
876   for (i=0; i<nrecvs2; i++) {
877     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
878   }
879 
880   /* send the messages */
881   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
882   for (i=0; i<nsends2; i++) {
883     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
884   }
885 
886   /* wait on receives */
887   if (nrecvs2) {
888     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
889     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
890     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
891   }
892   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
893   ierr = PetscFree(nprocs);CHKERRQ(ierr);
894 
895   if (debug) { /* -----------------------------------  */
896     cnt = 0;
897     for (i=0; i<nrecvs2; i++) {
898       nt = recvs2[cnt++];
899       for (j=0; j<nt; j++) {
900         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
901         for (k=0; k<recvs2[cnt+1]; k++) {
902           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
903         }
904         cnt += 2 + recvs2[cnt+1];
905         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
906       }
907     }
908     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
909   } /* -----------------------------------  */
910 
911   /* count number subdomains for each local node */
912   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
913   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
914   cnt  = 0;
915   for (i=0; i<nrecvs2; i++) {
916     nt = recvs2[cnt++];
917     for (j=0; j<nt; j++) {
918       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
919       cnt += 2 + recvs2[cnt+1];
920     }
921   }
922   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
923   *nproc    = nt;
924   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
925   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
926   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
927   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
928   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
929   cnt       = 0;
930   for (i=0; i<size; i++) {
931     if (nprocs[i] > 0) {
932       bprocs[i]        = cnt;
933       (*procs)[cnt]    = i;
934       (*numprocs)[cnt] = nprocs[i];
935       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
936       cnt++;
937     }
938   }
939 
940   /* make the list of subdomains for each nontrivial local node */
941   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
942   cnt  = 0;
943   for (i=0; i<nrecvs2; i++) {
944     nt = recvs2[cnt++];
945     for (j=0; j<nt; j++) {
946       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
947       cnt += 2 + recvs2[cnt+1];
948     }
949   }
950   ierr = PetscFree(bprocs);CHKERRQ(ierr);
951   ierr = PetscFree(recvs2);CHKERRQ(ierr);
952 
953   /* sort the node indexing by their global numbers */
954   nt = *nproc;
955   for (i=0; i<nt; i++) {
956     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
957     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
958     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
959     ierr = PetscFree(tmp);CHKERRQ(ierr);
960   }
961 
962   if (debug) { /* -----------------------------------  */
963     nt = *nproc;
964     for (i=0; i<nt; i++) {
965       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
966       for (j=0; j<(*numprocs)[i]; j++) {
967         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
968       }
969       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
970     }
971     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
972   } /* -----------------------------------  */
973 
974   /* wait on sends */
975   if (nsends2) {
976     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
977     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
978     ierr = PetscFree(send_status);CHKERRQ(ierr);
979   }
980 
981   ierr = PetscFree(starts3);CHKERRQ(ierr);
982   ierr = PetscFree(dest);CHKERRQ(ierr);
983   ierr = PetscFree(send_waits);CHKERRQ(ierr);
984 
985   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
986   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
987   ierr = PetscFree(starts);CHKERRQ(ierr);
988   ierr = PetscFree(starts2);CHKERRQ(ierr);
989   ierr = PetscFree(lens2);CHKERRQ(ierr);
990 
991   ierr = PetscFree(source);CHKERRQ(ierr);
992   ierr = PetscFree(len);CHKERRQ(ierr);
993   ierr = PetscFree(recvs);CHKERRQ(ierr);
994   ierr = PetscFree(nprocs);CHKERRQ(ierr);
995   ierr = PetscFree(sends2);CHKERRQ(ierr);
996 
997   /* put the information about myself as the first entry in the list */
998   first_procs    = (*procs)[0];
999   first_numprocs = (*numprocs)[0];
1000   first_indices  = (*indices)[0];
1001   for (i=0; i<*nproc; i++) {
1002     if ((*procs)[i] == rank) {
1003       (*procs)[0]    = (*procs)[i];
1004       (*numprocs)[0] = (*numprocs)[i];
1005       (*indices)[0]  = (*indices)[i];
1006       (*procs)[i]    = first_procs;
1007       (*numprocs)[i] = first_numprocs;
1008       (*indices)[i]  = first_indices;
1009       break;
1010     }
1011   }
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 #undef __FUNCT__
1016 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1017 /*@C
1018     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1019 
1020     Collective on ISLocalToGlobalMapping
1021 
1022     Input Parameters:
1023 .   mapping - the mapping from local to global indexing
1024 
    Output Parameters:
1026 +   nproc - number of processors that are connected to this one
1027 .   proc - neighboring processors
1028 .   numproc - number of indices for each processor
1029 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1030 
1031     Level: advanced
1032 
1033 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1034           ISLocalToGlobalMappingGetInfo()
1035 @*/
1036 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1037 {
1038   PetscErrorCode ierr;
1039   PetscInt       i;
1040 
1041   PetscFunctionBegin;
1042   ierr = PetscFree(*procs);CHKERRQ(ierr);
1043   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1044   if (*indices) {
1045     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1046     for (i=1; i<*nproc; i++) {
1047       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1048     }
1049     ierr = PetscFree(*indices);CHKERRQ(ierr);
1050   }
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 #undef __FUNCT__
1055 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1056 /*@C
1057    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1058 
1059    Not Collective
1060 
1061    Input Arguments:
1062 . ltog - local to global mapping
1063 
1064    Output Arguments:
1065 . array - array of indices
1066 
1067    Level: advanced
1068 
1069 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1070 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* borrowed reference to the mapping's own index array: the caller must
     return it with ISLocalToGlobalMappingRestoreIndices(), never free it */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1079 
1080 #undef __FUNCT__
1081 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1082 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1084 
1085    Not Collective
1086 
1087    Input Arguments:
1088 + ltog - local to global mapping
1089 - array - array of indices
1090 
1091    Level: advanced
1092 
1093 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1094 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the pointer was borrowed from this mapping, so it must match exactly */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* null out the caller's pointer to prevent accidental use after restore */
  *array = NULL;
  PetscFunctionReturn(0);
}
1104 
1105 #undef __FUNCT__
1106 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1107 /*@C
1108    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1109 
1110    Not Collective
1111 
1112    Input Arguments:
1113 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1114 . n - number of mappings to concatenate
1115 - ltogs - local to global mappings
1116 
1117    Output Arguments:
1118 . ltogcat - new mapping
1119 
1120    Level: advanced
1121 
1122 .seealso: ISLocalToGlobalMappingCreate()
1123 @*/
1124 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1125 {
1126   PetscInt       i,cnt,m,*idx;
1127   PetscErrorCode ierr;
1128 
1129   PetscFunctionBegin;
1130   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1131   if (n > 0) PetscValidPointer(ltogs,3);
1132   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1133   PetscValidPointer(ltogcat,4);
1134   for (cnt=0,i=0; i<n; i++) {
1135     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1136     cnt += m;
1137   }
1138   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1139   for (cnt=0,i=0; i<n; i++) {
1140     const PetscInt *subidx;
1141     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1142     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1143     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1144     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1145     cnt += m;
1146   }
1147   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1148   PetscFunctionReturn(0);
1149 }
1150 
1151 
1152