xref: /petsc/src/vec/is/utils/isltog.c (revision 247e2d9283ff1bbf8950108a11f1a3a3a92a3dd5)
1 
2 #include <petscvec.h>   /*I "petscvec.h" I*/
3 #include <private/isimpl.h>    /*I "petscis.h"  I*/
4 
5 PetscClassId  IS_LTOGM_CLASSID;
6 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;  /* number of local indices stored in the mapping */
  PetscFunctionReturn(0);
}
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "ISLocalToGlobalMappingView"
37 /*@C
38     ISLocalToGlobalMappingView - View a local to global mapping
39 
40     Not Collective
41 
42     Input Parameters:
43 +   ltog - local to global mapping
44 -   viewer - viewer
45 
46     Level: advanced
47 
48     Concepts: mapping^local to global
49 
50 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
51 @*/
52 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
53 {
54   PetscInt        i;
55   PetscMPIInt     rank;
56   PetscBool       iascii;
57   PetscErrorCode  ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
61   if (!viewer) {
62     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
63   }
64   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
65 
66   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
67   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
68   if (iascii) {
69     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
70     for (i=0; i<mapping->n; i++) {
71       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
72     }
73     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
74     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
75   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
81 /*@
82     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
83     ordering and a global parallel ordering.
84 
85     Not collective
86 
87     Input Parameter:
88 .   is - index set containing the global numbers for each local number
89 
90     Output Parameter:
91 .   mapping - new mapping data structure
92 
93     Level: advanced
94 
95     Concepts: mapping^local to global
96 
97 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
98 @*/
99 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
100 {
101   PetscErrorCode ierr;
102   PetscInt       n;
103   const PetscInt *indices;
104   MPI_Comm       comm;
105 
106   PetscFunctionBegin;
107   PetscValidHeaderSpecific(is,IS_CLASSID,1);
108   PetscValidPointer(mapping,2);
109 
110   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
111   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
112   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
113   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
114   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
115   PetscFunctionReturn(0);
116 }
117 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,PETSC_NULL);CHKERRQ(ierr);
  /* ilocal == PETSC_NULL means the leaves are the contiguous range 0..nleaves-1;
     otherwise the local space must cover the largest explicit leaf index */
  if (ilocal) {for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);}
  else maxlocal = nleaves;
  ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
  /* each owned root gets the contiguous global number start+i */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* local slots not covered by any leaf keep the value -1 (no global owner) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  /* broadcast the owners' global numbers to the leaves */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership passes to the mapping; only globals must be freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
164 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element
-   mode - see PetscCopyMode; PETSC_USE_POINTER is not supported

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);  /* indices may be NULL when n == 0 */
  PetscValidPointer(mapping,4);

  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n       = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) {
    /* the mapping assumes ownership of indices; the caller must not free it */
    (*mapping)->indices = (PetscInt*)indices;
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
220 
221 #undef __FUNCT__
222 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
223 /*@
224     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
225        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
226        and VecSetLocalToGlobalMappingBlock().
227 
228     Not Collective, but communicator may have more than one process
229 
230     Input Parameters:
231 +    inmap - original point-wise mapping
232 -    bs - block size
233 
234     Output Parameter:
235 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
236 
237     Level: advanced
238 
239     Concepts: mapping^local to global
240 
241 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
242 @*/
243 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
244 {
245   PetscErrorCode ierr;
246   PetscInt       *ii,i,n;
247 
248   PetscFunctionBegin;
249   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
250   PetscValidPointer(outmap,3);
251   if (bs > 1) {
252     n    = inmap->n/bs;
253     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
254     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
255     for (i=0; i<n; i++) {
256       ii[i] = inmap->indices[bs*i]/bs;
257     }
258     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
259   } else {
260     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
261     *outmap = inmap;
262   }
263   PetscFunctionReturn(0);
264 }
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
268 /*@
269     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
270        ISLocalToGlobalMapping
271 
272     Not Collective, but communicator may have more than one process
273 
274     Input Parameter:
275 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
276 - bs - block size
277 
278     Output Parameter:
279 .   outmap - pointwise mapping
280 
281     Level: advanced
282 
283     Concepts: mapping^local to global
284 
285 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
286 @*/
287 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
288 {
289   PetscErrorCode ierr;
290   PetscInt       *ii,i,n;
291 
292   PetscFunctionBegin;
293   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
294   PetscValidPointer(outmap,2);
295   if (bs > 1) {
296     n    = inmap->n*bs;
297     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
298     for (i=0; i<n; i++) {
299       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
300     }
301     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
302   } else {
303     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
304     *outmap = inmap;
305   }
306   PetscFunctionReturn(0);
307 }
308 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object; earlier callers just drop theirs */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
337 
338 #undef __FUNCT__
339 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
340 /*@
341     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
342     a new index set using the global numbering defined in an ISLocalToGlobalMapping
343     context.
344 
345     Not collective
346 
347     Input Parameters:
348 +   mapping - mapping between local and global numbering
349 -   is - index set in local numbering
350 
351     Output Parameters:
352 .   newis - index set in global numbering
353 
354     Level: advanced
355 
356     Concepts: mapping^local to global
357 
358 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
359           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
360 @*/
361 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
362 {
363   PetscErrorCode ierr;
364   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
365   const PetscInt *idxin;
366 
367   PetscFunctionBegin;
368   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
369   PetscValidHeaderSpecific(is,IS_CLASSID,2);
370   PetscValidPointer(newis,3);
371 
372   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
373   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
374   idxmap = mapping->indices;
375 
376   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
377   for (i=0; i<n; i++) {
378     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
379     idxout[i] = idxmap[idxin[i]];
380   }
381   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
382   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
383   PetscFunctionReturn(0);
384 }
385 
386 /*MC
387    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
388    and converts them to the global numbering.
389 
390    Synopsis:
391    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
392 
393    Not collective
394 
395    Input Parameters:
396 +  mapping - the local to global mapping context
397 .  N - number of integers
398 -  in - input indices in local numbering
399 
400    Output Parameter:
401 .  out - indices in global numbering
402 
403    Notes:
404    The in and out array parameters may be identical.
405 
406    Level: advanced
407 
408 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
409           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
410           AOPetscToApplication(), ISGlobalToLocalMappingApply()
411 
412     Concepts: mapping^local to global
413 
414 M*/
415 
416 /* -----------------------------------------------------------------------------------------*/
417 
418 #undef __FUNCT__
419 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
420 /*
421     Creates the global fields in the ISLocalToGlobalMapping structure
422 */
423 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
424 {
425   PetscErrorCode ierr;
426   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
427 
428   PetscFunctionBegin;
429   end   = 0;
430   start = PETSC_MAX_INT;
431 
432   for (i=0; i<n; i++) {
433     if (idx[i] < 0) continue;
434     if (idx[i] < start) start = idx[i];
435     if (idx[i] > end)   end   = idx[i];
436   }
437   if (start > end) {start = 0; end = -1;}
438   mapping->globalstart = start;
439   mapping->globalend   = end;
440 
441   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
442   mapping->globals = globals;
443   for (i=0; i<end-start+1; i++) {
444     globals[i] = -1;
445   }
446   for (i=0; i<n; i++) {
447     if (idx[i] < 0) continue;
448     globals[idx[i] - start] = i;
449   }
450 
451   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
452   PetscFunctionReturn(0);
453 }
454 
455 #undef __FUNCT__
456 #define __FUNCT__ "ISGlobalToLocalMappingApply"
457 /*@
458     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
459     specified with a global numbering.
460 
461     Not collective
462 
463     Input Parameters:
464 +   mapping - mapping between local and global numbering
465 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
466            IS_GTOLM_DROP - drops the indices with no local value from the output list
467 .   n - number of global indices to map
468 -   idx - global indices to map
469 
470     Output Parameters:
471 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
472 -   idxout - local index of each global index, one must pass in an array long enough
473              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
474              idxout == PETSC_NULL to determine the required length (returned in nout)
475              and then allocate the required space and call ISGlobalToLocalMappingApply()
476              a second time to set the values.
477 
478     Notes:
479     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
480 
481     This is not scalable in memory usage. Each processor requires O(Nglobal) size
482     array to compute these.
483 
484     Level: advanced
485 
486     Concepts: mapping^global to local
487 
488 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
489           ISLocalToGlobalMappingDestroy()
490 @*/
491 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
492                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
493 {
494   PetscInt       i,*globals,nf = 0,tmp,start,end;
495   PetscErrorCode ierr;
496 
497   PetscFunctionBegin;
498   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
499   if (!mapping->globals) {
500     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
501   }
502   globals = mapping->globals;
503   start   = mapping->globalstart;
504   end     = mapping->globalend;
505 
506   if (type == IS_GTOLM_MASK) {
507     if (idxout) {
508       for (i=0; i<n; i++) {
509         if (idx[i] < 0) idxout[i] = idx[i];
510         else if (idx[i] < start) idxout[i] = -1;
511         else if (idx[i] > end)   idxout[i] = -1;
512         else                     idxout[i] = globals[idx[i] - start];
513       }
514     }
515     if (nout) *nout = n;
516   } else {
517     if (idxout) {
518       for (i=0; i<n; i++) {
519         if (idx[i] < 0) continue;
520         if (idx[i] < start) continue;
521         if (idx[i] > end) continue;
522         tmp = globals[idx[i] - start];
523         if (tmp < 0) continue;
524         idxout[nf++] = tmp;
525       }
526     } else {
527       for (i=0; i<n; i++) {
528         if (idx[i] < 0) continue;
529         if (idx[i] < start) continue;
530         if (idx[i] > end) continue;
531         tmp = globals[idx[i] - start];
532         if (tmp < 0) continue;
533         nf++;
534       }
535     }
536     if (nout) *nout = nf;
537   }
538 
539   PetscFunctionReturn(0);
540 }
541 
542 #undef __FUNCT__
543 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
544 /*@C
545     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
546      each index shared by more than one processor
547 
548     Collective on ISLocalToGlobalMapping
549 
550     Input Parameters:
551 .   mapping - the mapping from local to global indexing
552 
553     Output Parameter:
554 +   nproc - number of processors that are connected to this one
555 .   proc - neighboring processors
556 .   numproc - number of indices for each subdomain (processor)
557 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
558 
559     Level: advanced
560 
561     Concepts: mapping^local to global
562 
563     Fortran Usage:
564 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
565 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
566           PetscInt indices[nproc][numprocmax],ierr)
567         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
568         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
569 
570 
571 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
572           ISLocalToGlobalMappingRestoreInfo()
573 @*/
574 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
575 {
576   PetscErrorCode ierr;
577   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
578   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
579   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
580   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
581   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
582   PetscInt       first_procs,first_numprocs,*first_indices;
583   MPI_Request    *recv_waits,*send_waits;
584   MPI_Status     recv_status,*send_status,*recv_statuses;
585   MPI_Comm       comm = ((PetscObject)mapping)->comm;
586   PetscBool      debug = PETSC_FALSE;
587 
588   PetscFunctionBegin;
589   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
590   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
591   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
592   if (size == 1) {
593     *nproc         = 0;
594     *procs         = PETSC_NULL;
595     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
596     (*numprocs)[0] = 0;
597     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
598     (*indices)[0]  = PETSC_NULL;
599     PetscFunctionReturn(0);
600   }
601 
602   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
603 
604   /*
605     Notes on ISLocalToGlobalMappingGetInfo
606 
607     globally owned node - the nodes that have been assigned to this processor in global
608            numbering, just for this routine.
609 
610     nontrivial globally owned node - node assigned to this processor that is on a subdomain
611            boundary (i.e. is has more than one local owner)
612 
613     locally owned node - node that exists on this processors subdomain
614 
615     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
616            local subdomain
617   */
618   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
619   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
620   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
621 
622   for (i=0; i<n; i++) {
623     if (lindices[i] > max) max = lindices[i];
624   }
625   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
626   Ng++;
627   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
628   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
629   scale  = Ng/size + 1;
630   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
631   rstart = scale*rank;
632 
633   /* determine ownership ranges of global indices */
634   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
635   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
636 
637   /* determine owners of each local node  */
638   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
639   for (i=0; i<n; i++) {
640     proc             = lindices[i]/scale; /* processor that globally owns this index */
641     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
642     owner[i]         = proc;
643     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
644   }
645   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
646   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
647 
648   /* inform other processors of number of messages and max length*/
649   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
650   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
651 
652   /* post receives for owned rows */
653   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
654   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
655   for (i=0; i<nrecvs; i++) {
656     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
657   }
658 
659   /* pack messages containing lists of local nodes to owners */
660   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
661   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
662   starts[0]  = 0;
663   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
664   for (i=0; i<n; i++) {
665     sends[starts[owner[i]]++] = lindices[i];
666     sends[starts[owner[i]]++] = i;
667   }
668   ierr = PetscFree(owner);CHKERRQ(ierr);
669   starts[0]  = 0;
670   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
671 
672   /* send the messages */
673   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
674   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
675   cnt = 0;
676   for (i=0; i<size; i++) {
677     if (nprocs[2*i]) {
678       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
679       dest[cnt] = i;
680       cnt++;
681     }
682   }
683   ierr = PetscFree(starts);CHKERRQ(ierr);
684 
685   /* wait on receives */
686   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
687   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
688   cnt  = nrecvs;
689   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
690   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
691   while (cnt) {
692     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
693     /* unpack receives into our local space */
694     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
695     source[imdex]  = recv_status.MPI_SOURCE;
696     len[imdex]     = len[imdex]/2;
697     /* count how many local owners for each of my global owned indices */
698     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
699     cnt--;
700   }
701   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
702 
703   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
704   nowned  = 0;
705   nownedm = 0;
706   for (i=0; i<ng; i++) {
707     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
708   }
709 
710   /* create single array to contain rank of all local owners of each globally owned index */
711   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
712   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
713   starts[0] = 0;
714   for (i=1; i<ng; i++) {
715     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
716     else starts[i] = starts[i-1];
717   }
718 
719   /* for each nontrival globally owned node list all arriving processors */
720   for (i=0; i<nrecvs; i++) {
721     for (j=0; j<len[i]; j++) {
722       node = recvs[2*i*nmax+2*j]-rstart;
723       if (nownedsenders[node] > 1) {
724         ownedsenders[starts[node]++] = source[i];
725       }
726     }
727   }
728 
729   if (debug) { /* -----------------------------------  */
730     starts[0]    = 0;
731     for (i=1; i<ng; i++) {
732       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
733       else starts[i] = starts[i-1];
734     }
735     for (i=0; i<ng; i++) {
736       if (nownedsenders[i] > 1) {
737         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
738         for (j=0; j<nownedsenders[i]; j++) {
739           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
740         }
741         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
742       }
743     }
744     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
745   }/* -----------------------------------  */
746 
747   /* wait on original sends */
748   if (nsends) {
749     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
750     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
751     ierr = PetscFree(send_status);CHKERRQ(ierr);
752   }
753   ierr = PetscFree(send_waits);CHKERRQ(ierr);
754   ierr = PetscFree(sends);CHKERRQ(ierr);
755   ierr = PetscFree(nprocs);CHKERRQ(ierr);
756 
757   /* pack messages to send back to local owners */
758   starts[0]    = 0;
759   for (i=1; i<ng; i++) {
760     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
761     else starts[i] = starts[i-1];
762   }
763   nsends2 = nrecvs;
764   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
765   for (i=0; i<nrecvs; i++) {
766     nprocs[i] = 1;
767     for (j=0; j<len[i]; j++) {
768       node = recvs[2*i*nmax+2*j]-rstart;
769       if (nownedsenders[node] > 1) {
770         nprocs[i] += 2 + nownedsenders[node];
771       }
772     }
773   }
774   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
775   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
776   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
777   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
778   /*
779      Each message is 1 + nprocs[i] long, and consists of
780        (0) the number of nodes being sent back
781        (1) the local node number,
782        (2) the number of processors sharing it,
783        (3) the processors sharing it
784   */
785   for (i=0; i<nsends2; i++) {
786     cnt = 1;
787     sends2[starts2[i]] = 0;
788     for (j=0; j<len[i]; j++) {
789       node = recvs[2*i*nmax+2*j]-rstart;
790       if (nownedsenders[node] > 1) {
791         sends2[starts2[i]]++;
792         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
793         sends2[starts2[i]+cnt++] = nownedsenders[node];
794         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
795         cnt += nownedsenders[node];
796       }
797     }
798   }
799 
800   /* receive the message lengths */
801   nrecvs2 = nsends;
802   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
803   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
804   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
805   for (i=0; i<nrecvs2; i++) {
806     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
807   }
808 
809   /* send the message lengths */
810   for (i=0; i<nsends2; i++) {
811     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
812   }
813 
814   /* wait on receives of lens */
815   if (nrecvs2) {
816     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
817     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
818     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
819   }
820   ierr = PetscFree(recv_waits);
821 
822   starts3[0] = 0;
823   nt         = 0;
824   for (i=0; i<nrecvs2-1; i++) {
825     starts3[i+1] = starts3[i] + lens2[i];
826     nt          += lens2[i];
827   }
828   nt += lens2[nrecvs2-1];
829 
830   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
831   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
832   for (i=0; i<nrecvs2; i++) {
833     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
834   }
835 
836   /* send the messages */
837   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
838   for (i=0; i<nsends2; i++) {
839     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
840   }
841 
842   /* wait on receives */
843   if (nrecvs2) {
844     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
845     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
846     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
847   }
848   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
849   ierr = PetscFree(nprocs);CHKERRQ(ierr);
850 
851   if (debug) { /* -----------------------------------  */
852     cnt = 0;
853     for (i=0; i<nrecvs2; i++) {
854       nt = recvs2[cnt++];
855       for (j=0; j<nt; j++) {
856         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
857         for (k=0; k<recvs2[cnt+1]; k++) {
858           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
859         }
860         cnt += 2 + recvs2[cnt+1];
861         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
862       }
863     }
864     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
865   } /* -----------------------------------  */
866 
867   /* count number subdomains for each local node */
868   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
869   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
870   cnt  = 0;
871   for (i=0; i<nrecvs2; i++) {
872     nt = recvs2[cnt++];
873     for (j=0; j<nt; j++) {
874       for (k=0; k<recvs2[cnt+1]; k++) {
875         nprocs[recvs2[cnt+2+k]]++;
876       }
877       cnt += 2 + recvs2[cnt+1];
878     }
879   }
880   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
881   *nproc    = nt;
882   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
883   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
884   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
885   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
886   cnt       = 0;
887   for (i=0; i<size; i++) {
888     if (nprocs[i] > 0) {
889       bprocs[i]        = cnt;
890       (*procs)[cnt]    = i;
891       (*numprocs)[cnt] = nprocs[i];
892       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
893       cnt++;
894     }
895   }
896 
897   /* make the list of subdomains for each nontrivial local node */
898   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
899   cnt  = 0;
900   for (i=0; i<nrecvs2; i++) {
901     nt = recvs2[cnt++];
902     for (j=0; j<nt; j++) {
903       for (k=0; k<recvs2[cnt+1]; k++) {
904         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
905       }
906       cnt += 2 + recvs2[cnt+1];
907     }
908   }
909   ierr = PetscFree(bprocs);CHKERRQ(ierr);
910   ierr = PetscFree(recvs2);CHKERRQ(ierr);
911 
912   /* sort the node indexing by their global numbers */
913   nt = *nproc;
914   for (i=0; i<nt; i++) {
915     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
916     for (j=0; j<(*numprocs)[i]; j++) {
917       tmp[j] = lindices[(*indices)[i][j]];
918     }
919     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
920     ierr = PetscFree(tmp);CHKERRQ(ierr);
921   }
922 
923   if (debug) { /* -----------------------------------  */
924     nt = *nproc;
925     for (i=0; i<nt; i++) {
926       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
927       for (j=0; j<(*numprocs)[i]; j++) {
928         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
929       }
930       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
931     }
932     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
933   } /* -----------------------------------  */
934 
935   /* wait on sends */
936   if (nsends2) {
937     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
938     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
939     ierr = PetscFree(send_status);CHKERRQ(ierr);
940   }
941 
942   ierr = PetscFree(starts3);CHKERRQ(ierr);
943   ierr = PetscFree(dest);CHKERRQ(ierr);
944   ierr = PetscFree(send_waits);CHKERRQ(ierr);
945 
946   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
947   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
948   ierr = PetscFree(starts);CHKERRQ(ierr);
949   ierr = PetscFree(starts2);CHKERRQ(ierr);
950   ierr = PetscFree(lens2);CHKERRQ(ierr);
951 
952   ierr = PetscFree(source);CHKERRQ(ierr);
953   ierr = PetscFree(len);CHKERRQ(ierr);
954   ierr = PetscFree(recvs);CHKERRQ(ierr);
955   ierr = PetscFree(nprocs);CHKERRQ(ierr);
956   ierr = PetscFree(sends2);CHKERRQ(ierr);
957 
958   /* put the information about myself as the first entry in the list */
959   first_procs    = (*procs)[0];
960   first_numprocs = (*numprocs)[0];
961   first_indices  = (*indices)[0];
962   for (i=0; i<*nproc; i++) {
963     if ((*procs)[i] == rank) {
964       (*procs)[0]    = (*procs)[i];
965       (*numprocs)[0] = (*numprocs)[i];
966       (*indices)[0]  = (*indices)[i];
967       (*procs)[i]    = first_procs;
968       (*numprocs)[i] = first_numprocs;
969       (*indices)[i]  = first_indices;
970       break;
971     }
972   }
973   PetscFunctionReturn(0);
974 }
975 
976 #undef __FUNCT__
977 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
978 /*@C
979     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
980 
981     Collective on ISLocalToGlobalMapping
982 
983     Input Parameters:
984 .   mapping - the mapping from local to global indexing
985 
    Input Parameters (as returned by ISLocalToGlobalMappingGetInfo()):
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
991 
992     Level: advanced
993 
994 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
995           ISLocalToGlobalMappingGetInfo()
996 @*/
997 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
998 {
999   PetscErrorCode ierr;
1000   PetscInt       i;
1001 
1002   PetscFunctionBegin;
1003   ierr = PetscFree(*procs);CHKERRQ(ierr);
1004   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1005   if (*indices) {
1006     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1007     for (i=1; i<*nproc; i++) {
1008       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1009     }
1010     ierr = PetscFree(*indices);CHKERRQ(ierr);
1011   }
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 #undef __FUNCT__
1016 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1017 /*@C
1018    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1019 
1020    Not Collective
1021 
1022    Input Arguments:
1023 . ltog - local to global mapping
1024 
1025    Output Arguments:
1026 . array - array of indices
1027 
1028    Level: advanced
1029 
1030 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1031 @*/
1032 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1033 {
1034 
1035   PetscFunctionBegin;
1036   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1037   PetscValidPointer(array,2);
1038   *array = ltog->indices;
1039   PetscFunctionReturn(0);
1040 }
1041 
1042 #undef __FUNCT__
1043 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1044 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1046 
1047    Not Collective
1048 
1049    Input Arguments:
1050 + ltog - local to global mapping
1051 - array - array of indices
1052 
1053    Level: advanced
1054 
1055 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1056 @*/
1057 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1058 {
1059 
1060   PetscFunctionBegin;
1061   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1062   PetscValidPointer(array,2);
1063   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1064   *array = PETSC_NULL;
1065   PetscFunctionReturn(0);
1066 }
1067 
1068 #undef __FUNCT__
1069 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1070 /*@C
1071    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1072 
1073    Not Collective
1074 
1075    Input Arguments:
1076 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1077 . n - number of mappings to concatenate
1078 - ltogs - local to global mappings
1079 
1080    Output Arguments:
1081 . ltogcat - new mapping
1082 
1083    Level: advanced
1084 
1085 .seealso: ISLocalToGlobalMappingCreate()
1086 @*/
1087 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1088 {
1089   PetscInt       i,cnt,m,*idx;
1090   PetscErrorCode ierr;
1091 
1092   PetscFunctionBegin;
1093   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1094   if (n > 0) PetscValidPointer(ltogs,3);
1095   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1096   PetscValidPointer(ltogcat,4);
1097   for (cnt=0,i=0; i<n; i++) {
1098     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1099     cnt += m;
1100   }
1101   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1102   for (cnt=0,i=0; i<n; i++) {
1103     const PetscInt *subidx;
1104     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1105     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1106     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1107     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1108     cnt += m;
1109   }
1110   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1111   PetscFunctionReturn(0);
1112 }
1113