xref: /petsc/src/vec/is/utils/isltog.c (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
4 PetscClassId IS_LTOGM_CLASSID;
5 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* the local size was fixed at creation time; simply report the stored count */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
33 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer; if PETSC_NULL the standard ASCII viewer on the mapping's communicator is used

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to the shared stdout ASCII viewer on the mapping's communicator */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* one "[rank] local global" line per entry; synchronized printing keeps
       per-rank output grouped, and must be re-disabled after the flush */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
87 .   is - index set containing the global numbers for each local number
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   PetscInt       n;
102   const PetscInt *indices;
103   MPI_Comm       comm;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(is,IS_CLASSID,1);
107   PetscValidPointer(mapping,2);
108 
109   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
110   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
111   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
112   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
113   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,PETSC_NULL);CHKERRQ(ierr);
  /* size of the local table: largest leaf index referenced plus one; when
     ilocal is NULL the leaves are the contiguous range 0..nleaves-1 */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
  /* this process' roots own the contiguous global range [start, start+nroots) */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* local slots not covered by any SF leaf keep the sentinel -1 */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog's ownership transfers to the new mapping (PETSC_OWN_POINTER), so
     only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
168 /*@
169     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
170     ordering and a global parallel ordering.
171 
172     Not Collective, but communicator may have more than one process
173 
174     Input Parameters:
175 +   comm - MPI communicator
176 .   n - the number of local elements
177 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
178 -   mode - see PetscCopyMode
179 
180     Output Parameter:
181 .   mapping - new mapping data structure
182 
183     Level: advanced
184 
185     Concepts: mapping^local to global
186 
187 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
188 @*/
189 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
190 {
191   PetscErrorCode ierr;
192   PetscInt       *in;
193 
194   PetscFunctionBegin;
195   if (n) PetscValidIntPointer(indices,3);
196   PetscValidPointer(mapping,4);
197 
198   *mapping = PETSC_NULL;
199 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
200   ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
201 #endif
202 
203   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
204                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
205   (*mapping)->n = n;
206   /*
207     Do not create the global to local mapping. This is only created if
208     ISGlobalToLocalMapping() is called
209   */
210   (*mapping)->globals = 0;
211   if (mode == PETSC_COPY_VALUES) {
212     ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
213     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
214     ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
215     (*mapping)->indices = in;
216   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
217   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
218   PetscFunctionReturn(0);
219 }
220 
221 #undef __FUNCT__
222 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
223 /*@
224     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
225        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
226        and VecSetLocalToGlobalMappingBlock().
227 
228     Not Collective, but communicator may have more than one process
229 
230     Input Parameters:
231 +    inmap - original point-wise mapping
232 -    bs - block size
233 
234     Output Parameter:
235 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
236 
237     Level: advanced
238 
239     Concepts: mapping^local to global
240 
241 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
242 @*/
243 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
244 {
245   PetscErrorCode ierr;
246   PetscInt       *ii,i,n;
247 
248   PetscFunctionBegin;
249   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
250   PetscValidPointer(outmap,3);
251   if (bs > 1) {
252     n = inmap->n/bs;
253     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
254     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
255     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
256     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
257   } else {
258     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
259     *outmap = inmap;
260   }
261   PetscFunctionReturn(0);
262 }
263 
264 #undef __FUNCT__
265 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
266 /*@
267     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
268        ISLocalToGlobalMapping
269 
270     Not Collective, but communicator may have more than one process
271 
272     Input Parameter:
273 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
274 - bs - block size
275 
276     Output Parameter:
277 .   outmap - pointwise mapping
278 
279     Level: advanced
280 
281     Concepts: mapping^local to global
282 
283 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
284 @*/
285 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
286 {
287   PetscErrorCode ierr;
288   PetscInt       *ii,i,n;
289 
290   PetscFunctionBegin;
291   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
292   PetscValidPointer(outmap,2);
293   if (bs > 1) {
294     n    = inmap->n*bs;
295     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
296     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
297     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
298   } else {
299     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
300     *outmap = inmap;
301   }
302   PetscFunctionReturn(0);
303 }
304 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* destroying a NULL mapping is a no-op */
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* other references remain: just drop ours and zero the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
334 
335 #undef __FUNCT__
336 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
337 /*@
338     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
339     a new index set using the global numbering defined in an ISLocalToGlobalMapping
340     context.
341 
342     Not collective
343 
344     Input Parameters:
345 +   mapping - mapping between local and global numbering
346 -   is - index set in local numbering
347 
348     Output Parameters:
349 .   newis - index set in global numbering
350 
351     Level: advanced
352 
353     Concepts: mapping^local to global
354 
355 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
356           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
357 @*/
358 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
359 {
360   PetscErrorCode ierr;
361   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
362   const PetscInt *idxin;
363 
364   PetscFunctionBegin;
365   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
366   PetscValidHeaderSpecific(is,IS_CLASSID,2);
367   PetscValidPointer(newis,3);
368 
369   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
370   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
371   idxmap = mapping->indices;
372 
373   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
374   for (i=0; i<n; i++) {
375     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
376     idxout[i] = idxmap[idxin[i]];
377   }
378   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
379   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
380   PetscFunctionReturn(0);
381 }
382 
383 /*MC
384    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
385    and converts them to the global numbering.
386 
387    Synopsis:
388    #include "petscis.h"
389    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
390 
391    Not collective
392 
393    Input Parameters:
394 +  mapping - the local to global mapping context
395 .  N - number of integers
396 -  in - input indices in local numbering
397 
398    Output Parameter:
399 .  out - indices in global numbering
400 
401    Notes:
402    The in and out array parameters may be identical.
403 
404    Level: advanced
405 
406 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
407           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
408           AOPetscToApplication(), ISGlobalToLocalMappingApply()
409 
410     Concepts: mapping^local to global
411 
412 M*/
413 
414 /* -----------------------------------------------------------------------------------------*/
415 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure.

    Builds a dense inverse table mapping->globals of length globalend-globalstart+1:
    globals[g - globalstart] is the local index whose global number is g, or -1
    if no local index maps to g. Memory use is proportional to the span of the
    global indices, not to the local size n.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  /* find [start,end], the range of the nonnegative global indices;
     negative entries are skipped (they denote unmapped slots) */
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: record an empty range */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* +2 (rather than +1) keeps the allocation nonzero for the empty range */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  /* -1 marks global numbers with no local counterpart */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
450 
451 #undef __FUNCT__
452 #define __FUNCT__ "ISGlobalToLocalMappingApply"
453 /*@
454     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
455     specified with a global numbering.
456 
457     Not collective
458 
459     Input Parameters:
460 +   mapping - mapping between local and global numbering
461 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
462            IS_GTOLM_DROP - drops the indices with no local value from the output list
463 .   n - number of global indices to map
464 -   idx - global indices to map
465 
466     Output Parameters:
467 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
468 -   idxout - local index of each global index, one must pass in an array long enough
469              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
470              idxout == PETSC_NULL to determine the required length (returned in nout)
471              and then allocate the required space and call ISGlobalToLocalMappingApply()
472              a second time to set the values.
473 
474     Notes:
475     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
476 
477     This is not scalable in memory usage. Each processor requires O(Nglobal) size
478     array to compute these.
479 
480     Level: advanced
481 
482     Developer Note: The manual page states that idx and idxout may be identical but the calling
483        sequence declares idx as const so it cannot be the same as idxout.
484 
485     Concepts: mapping^global to local
486 
487 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
488           ISLocalToGlobalMappingDestroy()
489 @*/
490 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
491                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
492 {
493   PetscInt       i,*globals,nf = 0,tmp,start,end;
494   PetscErrorCode ierr;
495 
496   PetscFunctionBegin;
497   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
498   if (!mapping->globals) {
499     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
500   }
501   globals = mapping->globals;
502   start   = mapping->globalstart;
503   end     = mapping->globalend;
504 
505   if (type == IS_GTOLM_MASK) {
506     if (idxout) {
507       for (i=0; i<n; i++) {
508         if (idx[i] < 0) idxout[i] = idx[i];
509         else if (idx[i] < start) idxout[i] = -1;
510         else if (idx[i] > end)   idxout[i] = -1;
511         else                     idxout[i] = globals[idx[i] - start];
512       }
513     }
514     if (nout) *nout = n;
515   } else {
516     if (idxout) {
517       for (i=0; i<n; i++) {
518         if (idx[i] < 0) continue;
519         if (idx[i] < start) continue;
520         if (idx[i] > end) continue;
521         tmp = globals[idx[i] - start];
522         if (tmp < 0) continue;
523         idxout[nf++] = tmp;
524       }
525     } else {
526       for (i=0; i<n; i++) {
527         if (idx[i] < 0) continue;
528         if (idx[i] < start) continue;
529         if (idx[i] > end) continue;
530         tmp = globals[idx[i] - start];
531         if (tmp < 0) continue;
532         nf++;
533       }
534     }
535     if (nout) *nout = nf;
536   }
537   PetscFunctionReturn(0);
538 }
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
542 /*@C
543     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
544      each index shared by more than one processor
545 
546     Collective on ISLocalToGlobalMapping
547 
548     Input Parameters:
549 .   mapping - the mapping from local to global indexing
550 
551     Output Parameter:
552 +   nproc - number of processors that are connected to this one
553 .   proc - neighboring processors
554 .   numproc - number of indices for each subdomain (processor)
555 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
556 
557     Level: advanced
558 
559     Concepts: mapping^local to global
560 
561     Fortran Usage:
562 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
563 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
564           PetscInt indices[nproc][numprocmax],ierr)
565         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
566         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
567 
568 
569 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
570           ISLocalToGlobalMappingRestoreInfo()
571 @*/
572 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
573 {
574   PetscErrorCode ierr;
575   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
576   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
577   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
578   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
579   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
580   PetscInt       first_procs,first_numprocs,*first_indices;
581   MPI_Request    *recv_waits,*send_waits;
582   MPI_Status     recv_status,*send_status,*recv_statuses;
583   MPI_Comm       comm  = ((PetscObject)mapping)->comm;
584   PetscBool      debug = PETSC_FALSE;
585 
586   PetscFunctionBegin;
587   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
588   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
589   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
590   if (size == 1) {
591     *nproc         = 0;
592     *procs         = PETSC_NULL;
593     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
594     (*numprocs)[0] = 0;
595     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
596     (*indices)[0]  = PETSC_NULL;
597     PetscFunctionReturn(0);
598   }
599 
600   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
601 
602   /*
603     Notes on ISLocalToGlobalMappingGetInfo
604 
605     globally owned node - the nodes that have been assigned to this processor in global
606            numbering, just for this routine.
607 
608     nontrivial globally owned node - node assigned to this processor that is on a subdomain
609            boundary (i.e. is has more than one local owner)
610 
611     locally owned node - node that exists on this processors subdomain
612 
613     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
614            local subdomain
615   */
616   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
617   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
618   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
619 
620   for (i=0; i<n; i++) {
621     if (lindices[i] > max) max = lindices[i];
622   }
623   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
624   Ng++;
625   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
626   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
627   scale  = Ng/size + 1;
628   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
629   rstart = scale*rank;
630 
631   /* determine ownership ranges of global indices */
632   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
633   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
634 
635   /* determine owners of each local node  */
636   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
637   for (i=0; i<n; i++) {
638     proc             = lindices[i]/scale; /* processor that globally owns this index */
639     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
640     owner[i]         = proc;
641     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
642   }
643   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
644   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
645 
646   /* inform other processors of number of messages and max length*/
647   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
648   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
649 
650   /* post receives for owned rows */
651   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
652   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
653   for (i=0; i<nrecvs; i++) {
654     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
655   }
656 
657   /* pack messages containing lists of local nodes to owners */
658   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
659   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
660   starts[0] = 0;
661   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
662   for (i=0; i<n; i++) {
663     sends[starts[owner[i]]++] = lindices[i];
664     sends[starts[owner[i]]++] = i;
665   }
666   ierr = PetscFree(owner);CHKERRQ(ierr);
667   starts[0] = 0;
668   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
669 
670   /* send the messages */
671   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
672   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
673   cnt = 0;
674   for (i=0; i<size; i++) {
675     if (nprocs[2*i]) {
676       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
677       dest[cnt] = i;
678       cnt++;
679     }
680   }
681   ierr = PetscFree(starts);CHKERRQ(ierr);
682 
683   /* wait on receives */
684   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
685   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
686   cnt  = nrecvs;
687   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
688   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
689   while (cnt) {
690     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
691     /* unpack receives into our local space */
692     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
693     source[imdex] = recv_status.MPI_SOURCE;
694     len[imdex]    = len[imdex]/2;
695     /* count how many local owners for each of my global owned indices */
696     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
697     cnt--;
698   }
699   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
700 
701   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
702   nowned  = 0;
703   nownedm = 0;
704   for (i=0; i<ng; i++) {
705     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
706   }
707 
708   /* create single array to contain rank of all local owners of each globally owned index */
709   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
710   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
711   starts[0] = 0;
712   for (i=1; i<ng; i++) {
713     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
714     else starts[i] = starts[i-1];
715   }
716 
717   /* for each nontrival globally owned node list all arriving processors */
718   for (i=0; i<nrecvs; i++) {
719     for (j=0; j<len[i]; j++) {
720       node = recvs[2*i*nmax+2*j]-rstart;
721       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
722     }
723   }
724 
725   if (debug) { /* -----------------------------------  */
726     starts[0] = 0;
727     for (i=1; i<ng; i++) {
728       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
729       else starts[i] = starts[i-1];
730     }
731     for (i=0; i<ng; i++) {
732       if (nownedsenders[i] > 1) {
733         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
734         for (j=0; j<nownedsenders[i]; j++) {
735           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
736         }
737         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
738       }
739     }
740     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
741   } /* -----------------------------------  */
742 
743   /* wait on original sends */
744   if (nsends) {
745     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
746     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
747     ierr = PetscFree(send_status);CHKERRQ(ierr);
748   }
749   ierr = PetscFree(send_waits);CHKERRQ(ierr);
750   ierr = PetscFree(sends);CHKERRQ(ierr);
751   ierr = PetscFree(nprocs);CHKERRQ(ierr);
752 
753   /* pack messages to send back to local owners */
754   starts[0] = 0;
755   for (i=1; i<ng; i++) {
756     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
757     else starts[i] = starts[i-1];
758   }
759   nsends2 = nrecvs;
760   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
761   for (i=0; i<nrecvs; i++) {
762     nprocs[i] = 1;
763     for (j=0; j<len[i]; j++) {
764       node = recvs[2*i*nmax+2*j]-rstart;
765       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
766     }
767   }
768   nt = 0;
769   for (i=0; i<nsends2; i++) nt += nprocs[i];
770 
771   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
772   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
773 
774   starts2[0] = 0;
775   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
776   /*
777      Each message is 1 + nprocs[i] long, and consists of
778        (0) the number of nodes being sent back
779        (1) the local node number,
780        (2) the number of processors sharing it,
781        (3) the processors sharing it
782   */
783   for (i=0; i<nsends2; i++) {
784     cnt = 1;
785     sends2[starts2[i]] = 0;
786     for (j=0; j<len[i]; j++) {
787       node = recvs[2*i*nmax+2*j]-rstart;
788       if (nownedsenders[node] > 1) {
789         sends2[starts2[i]]++;
790         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
791         sends2[starts2[i]+cnt++] = nownedsenders[node];
792         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
793         cnt += nownedsenders[node];
794       }
795     }
796   }
797 
798   /* receive the message lengths */
799   nrecvs2 = nsends;
800   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
801   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
802   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
803   for (i=0; i<nrecvs2; i++) {
804     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
805   }
806 
807   /* send the message lengths */
808   for (i=0; i<nsends2; i++) {
809     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
810   }
811 
812   /* wait on receives of lens */
813   if (nrecvs2) {
814     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
815     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
816     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
817   }
818   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
819 
820   starts3[0] = 0;
821   nt         = 0;
822   for (i=0; i<nrecvs2-1; i++) {
823     starts3[i+1] = starts3[i] + lens2[i];
824     nt          += lens2[i];
825   }
826   if (nrecvs2) nt += lens2[nrecvs2-1];
827 
828   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
829   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
830   for (i=0; i<nrecvs2; i++) {
831     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
832   }
833 
834   /* send the messages */
835   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
836   for (i=0; i<nsends2; i++) {
837     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
838   }
839 
840   /* wait on receives */
841   if (nrecvs2) {
842     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
843     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
844     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
845   }
846   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
847   ierr = PetscFree(nprocs);CHKERRQ(ierr);
848 
849   if (debug) { /* -----------------------------------  */
850     cnt = 0;
851     for (i=0; i<nrecvs2; i++) {
852       nt = recvs2[cnt++];
853       for (j=0; j<nt; j++) {
854         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
855         for (k=0; k<recvs2[cnt+1]; k++) {
856           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
857         }
858         cnt += 2 + recvs2[cnt+1];
859         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
860       }
861     }
862     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
863   } /* -----------------------------------  */
864 
865   /* count number subdomains for each local node */
866   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
867   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
868   cnt  = 0;
869   for (i=0; i<nrecvs2; i++) {
870     nt = recvs2[cnt++];
871     for (j=0; j<nt; j++) {
872       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
873       cnt += 2 + recvs2[cnt+1];
874     }
875   }
876   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
877   *nproc    = nt;
878   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
879   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
880   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
881   for (i=0;i<nt+1;i++) (*indices)[i]=PETSC_NULL;
882   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
883   cnt       = 0;
884   for (i=0; i<size; i++) {
885     if (nprocs[i] > 0) {
886       bprocs[i]        = cnt;
887       (*procs)[cnt]    = i;
888       (*numprocs)[cnt] = nprocs[i];
889       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
890       cnt++;
891     }
892   }
893 
894   /* make the list of subdomains for each nontrivial local node */
895   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
896   cnt  = 0;
897   for (i=0; i<nrecvs2; i++) {
898     nt = recvs2[cnt++];
899     for (j=0; j<nt; j++) {
900       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
901       cnt += 2 + recvs2[cnt+1];
902     }
903   }
904   ierr = PetscFree(bprocs);CHKERRQ(ierr);
905   ierr = PetscFree(recvs2);CHKERRQ(ierr);
906 
907   /* sort the node indexing by their global numbers */
908   nt = *nproc;
909   for (i=0; i<nt; i++) {
910     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
911     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
912     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
913     ierr = PetscFree(tmp);CHKERRQ(ierr);
914   }
915 
916   if (debug) { /* -----------------------------------  */
917     nt = *nproc;
918     for (i=0; i<nt; i++) {
919       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
920       for (j=0; j<(*numprocs)[i]; j++) {
921         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
922       }
923       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
924     }
925     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
926   } /* -----------------------------------  */
927 
928   /* wait on sends */
929   if (nsends2) {
930     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
931     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
932     ierr = PetscFree(send_status);CHKERRQ(ierr);
933   }
934 
935   ierr = PetscFree(starts3);CHKERRQ(ierr);
936   ierr = PetscFree(dest);CHKERRQ(ierr);
937   ierr = PetscFree(send_waits);CHKERRQ(ierr);
938 
939   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
940   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
941   ierr = PetscFree(starts);CHKERRQ(ierr);
942   ierr = PetscFree(starts2);CHKERRQ(ierr);
943   ierr = PetscFree(lens2);CHKERRQ(ierr);
944 
945   ierr = PetscFree(source);CHKERRQ(ierr);
946   ierr = PetscFree(len);CHKERRQ(ierr);
947   ierr = PetscFree(recvs);CHKERRQ(ierr);
948   ierr = PetscFree(nprocs);CHKERRQ(ierr);
949   ierr = PetscFree(sends2);CHKERRQ(ierr);
950 
951   /* put the information about myself as the first entry in the list */
952   first_procs    = (*procs)[0];
953   first_numprocs = (*numprocs)[0];
954   first_indices  = (*indices)[0];
955   for (i=0; i<*nproc; i++) {
956     if ((*procs)[i] == rank) {
957       (*procs)[0]    = (*procs)[i];
958       (*numprocs)[0] = (*numprocs)[i];
959       (*indices)[0]  = (*indices)[i];
960       (*procs)[i]    = first_procs;
961       (*numprocs)[i] = first_numprocs;
962       (*indices)[i]  = first_indices;
963       break;
964     }
965   }
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
971 /*@C
972     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
973 
974     Collective on ISLocalToGlobalMapping
975 
976     Input Parameters:
977 .   mapping - the mapping from local to global indexing
978 
    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
984 
985     Level: advanced
986 
987 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
988           ISLocalToGlobalMappingGetInfo()
989 @*/
990 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
991 {
992   PetscErrorCode ierr;
993   PetscInt       i;
994 
995   PetscFunctionBegin;
996   ierr = PetscFree(*procs);CHKERRQ(ierr);
997   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
998   if (*indices) {
999     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1000     for (i=1; i<*nproc; i++) {
1001       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1002     }
1003     ierr = PetscFree(*indices);CHKERRQ(ierr);
1004   }
1005   PetscFunctionReturn(0);
1006 }
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1010 /*@C
1011    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1012 
1013    Not Collective
1014 
1015    Input Arguments:
1016 . ltog - local to global mapping
1017 
1018    Output Arguments:
1019 . array - array of indices
1020 
1021    Level: advanced
1022 
1023 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1024 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* read-only borrow of the mapping's internal index array -- no copy is made,
     so the caller must pair this with ISLocalToGlobalMappingRestoreIndices()
     and must not free or modify the array */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1033 
1034 #undef __FUNCT__
1035 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1036 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1038 
1039    Not Collective
1040 
1041    Input Arguments:
1042 + ltog - local to global mapping
1043 - array - array of indices
1044 
1045    Level: advanced
1046 
1047 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1048 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the pointer must be exactly the one handed out by
     ISLocalToGlobalMappingGetIndices(); it is only invalidated here, not
     freed, since it aliases the mapping's internal storage */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = PETSC_NULL;
  PetscFunctionReturn(0);
}
1058 
1059 #undef __FUNCT__
1060 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1061 /*@C
1062    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1063 
1064    Not Collective
1065 
1066    Input Arguments:
1067 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1068 . n - number of mappings to concatenate
1069 - ltogs - local to global mappings
1070 
1071    Output Arguments:
1072 . ltogcat - new mapping
1073 
1074    Level: advanced
1075 
1076 .seealso: ISLocalToGlobalMappingCreate()
1077 @*/
1078 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1079 {
1080   PetscInt       i,cnt,m,*idx;
1081   PetscErrorCode ierr;
1082 
1083   PetscFunctionBegin;
1084   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1085   if (n > 0) PetscValidPointer(ltogs,3);
1086   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1087   PetscValidPointer(ltogcat,4);
1088   for (cnt=0,i=0; i<n; i++) {
1089     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1090     cnt += m;
1091   }
1092   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1093   for (cnt=0,i=0; i<n; i++) {
1094     const PetscInt *subidx;
1095     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1096     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1097     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1098     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1099     cnt += m;
1100   }
1101   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1102   PetscFunctionReturn(0);
1103 }
1104