xref: /petsc/src/vec/is/utils/isltog.c (revision f0413b6f5d7c9354ff89613ff0ab87d5c3d1b2ee)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@C
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
142   /*  if (!isblock) { */
143     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
144     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
145     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
146     /*  } else {
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150      }*/
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
205 /*@
206     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
207     ordering and a global parallel ordering.
208 
209     Not Collective, but communicator may have more than one process
210 
211     Input Parameters:
212 +   comm - MPI communicator
213 .   bs - the block size
214 .   n - the number of local elements
215 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled by the blocksize bs
216 -   mode - see PetscCopyMode
217 
218     Output Parameter:
219 .   mapping - new mapping data structure
220 
221     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
222     Level: advanced
223 
224     Concepts: mapping^local to global
225 
226 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
227 @*/
228 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
229 {
230   PetscErrorCode ierr;
231   PetscInt       *in;
232 
233   PetscFunctionBegin;
234   if (n) PetscValidIntPointer(indices,3);
235   PetscValidPointer(mapping,4);
236 
237   *mapping = NULL;
238   ierr = ISInitializePackage();CHKERRQ(ierr);
239 
240   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
241                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
242   (*mapping)->n  = n;
243   (*mapping)->bs = bs;
244   /*
245     Do not create the global to local mapping. This is only created if
246     ISGlobalToLocalMapping() is called
247   */
248   (*mapping)->globals = 0;
249   if (mode == PETSC_COPY_VALUES) {
250     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
251     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
252     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
253     (*mapping)->indices = in;
254   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
255   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
256   PetscFunctionReturn(0);
257 }
258 
259 #undef __FUNCT__
260 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
261 /*@
262     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
263        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
264        and VecSetLocalToGlobalMappingBlock().
265 
266     Not Collective, but communicator may have more than one process
267 
268     Input Parameters:
269 +    inmap - original point-wise mapping
270 -    bs - block size
271 
272     Output Parameter:
273 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
274 
275     Level: advanced
276 
277     Concepts: mapping^local to global
278 
279 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
280 @*/
281 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
282 {
283   PetscErrorCode ierr;
284   PetscInt       *ii,i,n;
285 
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
288   PetscValidPointer(outmap,3);
289   if (bs > 1) {
290     n = inmap->n/bs;
291     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
292     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
293     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
294     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),1,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
295   } else {
296     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
297     *outmap = inmap;
298   }
299   PetscFunctionReturn(0);
300 }
301 
302 #undef __FUNCT__
303 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
304 /*@
305     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
306        ISLocalToGlobalMapping
307 
308     Not Collective, but communicator may have more than one process
309 
310     Input Parameter:
311 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
312 - bs - block size
313 
314     Output Parameter:
315 .   outmap - pointwise mapping
316 
317     Level: advanced
318 
319     Concepts: mapping^local to global
320 
321 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
322 @*/
323 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
324 {
325   PetscErrorCode ierr;
326   PetscInt       *ii,i,n;
327 
328   PetscFunctionBegin;
329   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
330   PetscValidPointer(outmap,2);
331   if (bs > 1) {
332     n    = inmap->n*bs;
333     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
334     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
335     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),1,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
336   } else {
337     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
338     *outmap = inmap;
339   }
340   PetscFunctionReturn(0);
341 }
342 
343 #undef __FUNCT__
344 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
345 /*@
346    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
347    ordering and a global parallel ordering.
348 
349    Note Collective
350 
351    Input Parameters:
352 .  mapping - mapping data structure
353 
354    Level: advanced
355 
356 .seealso: ISLocalToGlobalMappingCreate()
357 @*/
358 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
359 {
360   PetscErrorCode ierr;
361 
362   PetscFunctionBegin;
363   if (!*mapping) PetscFunctionReturn(0);
364   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
365   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
366   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
367   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
368   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
369   *mapping = 0;
370   PetscFunctionReturn(0);
371 }
372 
373 #undef __FUNCT__
374 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
375 /*@
376     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
377     a new index set using the global numbering defined in an ISLocalToGlobalMapping
378     context.
379 
380     Not collective
381 
382     Input Parameters:
383 +   mapping - mapping between local and global numbering
384 -   is - index set in local numbering
385 
386     Output Parameters:
387 .   newis - index set in global numbering
388 
389     Level: advanced
390 
391     Concepts: mapping^local to global
392 
393 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
394           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
395 @*/
396 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
397 {
398   PetscErrorCode ierr;
399   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
400   const PetscInt *idxin;
401 
402   PetscFunctionBegin;
403   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
404   PetscValidHeaderSpecific(is,IS_CLASSID,2);
405   PetscValidPointer(newis,3);
406 
407   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
408   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
409   idxmap = mapping->indices;
410 
411   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
412   for (i=0; i<n; i++) {
413     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
414     idxout[i] = idxmap[idxin[i]];
415   }
416   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
417   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
418   PetscFunctionReturn(0);
419 }
420 
421 #undef __FUNCT__
422 #define __FUNCT__ "ISLocalToGlobalMappingApply"
423 /*@
424    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
425    and converts them to the global numbering.
426 
427    Not collective
428 
429    Input Parameters:
430 +  mapping - the local to global mapping context
431 .  N - number of integers
432 -  in - input indices in local numbering
433 
434    Output Parameter:
435 .  out - indices in global numbering
436 
437    Notes:
438    The in and out array parameters may be identical.
439 
440    Level: advanced
441 
442 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
443           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
444           AOPetscToApplication(), ISGlobalToLocalMappingApply()
445 
446     Concepts: mapping^local to global
447 @*/
448 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
449 {
450   PetscInt       i,Nmax = mapping->n;
451   const PetscInt *idx = mapping->indices;
452 
453   PetscFunctionBegin;
454   for (i=0; i<N; i++) {
455     if (in[i] < 0) {
456       out[i] = in[i];
457       continue;
458     }
459     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
460     out[i] = idx[in[i]];
461   }
462   PetscFunctionReturn(0);
463 }
464 
465 /* -----------------------------------------------------------------------------------------*/
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
469 /*
470     Creates the global fields in the ISLocalToGlobalMapping structure
471 */
472 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
473 {
474   PetscErrorCode ierr;
475   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
476 
477   PetscFunctionBegin;
478   end   = 0;
479   start = PETSC_MAX_INT;
480 
481   for (i=0; i<n; i++) {
482     if (idx[i] < 0) continue;
483     if (idx[i] < start) start = idx[i];
484     if (idx[i] > end)   end   = idx[i];
485   }
486   if (start > end) {start = 0; end = -1;}
487   mapping->globalstart = start;
488   mapping->globalend   = end;
489 
490   ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
491   mapping->globals = globals;
492   for (i=0; i<end-start+1; i++) globals[i] = -1;
493   for (i=0; i<n; i++) {
494     if (idx[i] < 0) continue;
495     globals[idx[i] - start] = i;
496   }
497 
498   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
499   PetscFunctionReturn(0);
500 }
501 
502 #undef __FUNCT__
503 #define __FUNCT__ "ISGlobalToLocalMappingApply"
504 /*@
505     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
506     specified with a global numbering.
507 
508     Not collective
509 
510     Input Parameters:
511 +   mapping - mapping between local and global numbering
512 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
513            IS_GTOLM_DROP - drops the indices with no local value from the output list
514 .   n - number of global indices to map
515 -   idx - global indices to map
516 
517     Output Parameters:
518 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
519 -   idxout - local index of each global index, one must pass in an array long enough
520              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
521              idxout == NULL to determine the required length (returned in nout)
522              and then allocate the required space and call ISGlobalToLocalMappingApply()
523              a second time to set the values.
524 
525     Notes:
526     Either nout or idxout may be NULL. idx and idxout may be identical.
527 
528     This is not scalable in memory usage. Each processor requires O(Nglobal) size
529     array to compute these.
530 
531     Level: advanced
532 
533     Developer Note: The manual page states that idx and idxout may be identical but the calling
534        sequence declares idx as const so it cannot be the same as idxout.
535 
536     Concepts: mapping^global to local
537 
538 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
539           ISLocalToGlobalMappingDestroy()
540 @*/
541 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
542                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
543 {
544   PetscInt       i,*globals,nf = 0,tmp,start,end;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin;
548   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
549   if (!mapping->globals) {
550     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
551   }
552   globals = mapping->globals;
553   start   = mapping->globalstart;
554   end     = mapping->globalend;
555 
556   if (type == IS_GTOLM_MASK) {
557     if (idxout) {
558       for (i=0; i<n; i++) {
559         if (idx[i] < 0) idxout[i] = idx[i];
560         else if (idx[i] < start) idxout[i] = -1;
561         else if (idx[i] > end)   idxout[i] = -1;
562         else                     idxout[i] = globals[idx[i] - start];
563       }
564     }
565     if (nout) *nout = n;
566   } else {
567     if (idxout) {
568       for (i=0; i<n; i++) {
569         if (idx[i] < 0) continue;
570         if (idx[i] < start) continue;
571         if (idx[i] > end) continue;
572         tmp = globals[idx[i] - start];
573         if (tmp < 0) continue;
574         idxout[nf++] = tmp;
575       }
576     } else {
577       for (i=0; i<n; i++) {
578         if (idx[i] < 0) continue;
579         if (idx[i] < start) continue;
580         if (idx[i] > end) continue;
581         tmp = globals[idx[i] - start];
582         if (tmp < 0) continue;
583         nf++;
584       }
585     }
586     if (nout) *nout = nf;
587   }
588   PetscFunctionReturn(0);
589 }
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
593 /*@C
594     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
595      each index shared by more than one processor
596 
597     Collective on ISLocalToGlobalMapping
598 
599     Input Parameters:
600 .   mapping - the mapping from local to global indexing
601 
602     Output Parameter:
603 +   nproc - number of processors that are connected to this one
604 .   proc - neighboring processors
605 .   numproc - number of indices for each subdomain (processor)
606 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
607 
608     Level: advanced
609 
610     Concepts: mapping^local to global
611 
612     Fortran Usage:
613 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
614 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
615           PetscInt indices[nproc][numprocmax],ierr)
616         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
617         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
618 
619 
620 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
621           ISLocalToGlobalMappingRestoreInfo()
622 @*/
623 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
624 {
625   PetscErrorCode ierr;
626   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
627   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
628   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
629   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
630   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
631   PetscInt       first_procs,first_numprocs,*first_indices;
632   MPI_Request    *recv_waits,*send_waits;
633   MPI_Status     recv_status,*send_status,*recv_statuses;
634   MPI_Comm       comm;
635   PetscBool      debug = PETSC_FALSE;
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
639   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
640   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
641   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
642   if (size == 1) {
643     *nproc         = 0;
644     *procs         = NULL;
645     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
646     (*numprocs)[0] = 0;
647     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
648     (*indices)[0]  = NULL;
649     PetscFunctionReturn(0);
650   }
651 
652   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
653 
654   /*
655     Notes on ISLocalToGlobalMappingGetInfo
656 
657     globally owned node - the nodes that have been assigned to this processor in global
658            numbering, just for this routine.
659 
660     nontrivial globally owned node - node assigned to this processor that is on a subdomain
661            boundary (i.e. is has more than one local owner)
662 
663     locally owned node - node that exists on this processors subdomain
664 
665     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
666            local subdomain
667   */
668   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
669   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
670   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
671 
672   for (i=0; i<n; i++) {
673     if (lindices[i] > max) max = lindices[i];
674   }
675   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
676   Ng++;
677   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
678   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
679   scale  = Ng/size + 1;
680   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
681   rstart = scale*rank;
682 
683   /* determine ownership ranges of global indices */
684   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
685   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
686 
687   /* determine owners of each local node  */
688   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
689   for (i=0; i<n; i++) {
690     proc             = lindices[i]/scale; /* processor that globally owns this index */
691     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
692     owner[i]         = proc;
693     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
694   }
695   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
696   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
697 
698   /* inform other processors of number of messages and max length*/
699   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
700   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
701 
702   /* post receives for owned rows */
703   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
704   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
705   for (i=0; i<nrecvs; i++) {
706     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
707   }
708 
709   /* pack messages containing lists of local nodes to owners */
710   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
711   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
712   starts[0] = 0;
713   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
714   for (i=0; i<n; i++) {
715     sends[starts[owner[i]]++] = lindices[i];
716     sends[starts[owner[i]]++] = i;
717   }
718   ierr = PetscFree(owner);CHKERRQ(ierr);
719   starts[0] = 0;
720   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
721 
722   /* send the messages */
723   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
724   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
725   cnt = 0;
726   for (i=0; i<size; i++) {
727     if (nprocs[2*i]) {
728       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
729       dest[cnt] = i;
730       cnt++;
731     }
732   }
733   ierr = PetscFree(starts);CHKERRQ(ierr);
734 
735   /* wait on receives */
736   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
737   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
738   cnt  = nrecvs;
739   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
740   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
741   while (cnt) {
742     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
743     /* unpack receives into our local space */
744     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
745     source[imdex] = recv_status.MPI_SOURCE;
746     len[imdex]    = len[imdex]/2;
747     /* count how many local owners for each of my global owned indices */
748     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
749     cnt--;
750   }
751   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
752 
753   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
754   nowned  = 0;
755   nownedm = 0;
756   for (i=0; i<ng; i++) {
757     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
758   }
759 
760   /* create single array to contain rank of all local owners of each globally owned index */
761   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
762   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
763   starts[0] = 0;
764   for (i=1; i<ng; i++) {
765     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
766     else starts[i] = starts[i-1];
767   }
768 
769   /* for each nontrival globally owned node list all arriving processors */
770   for (i=0; i<nrecvs; i++) {
771     for (j=0; j<len[i]; j++) {
772       node = recvs[2*i*nmax+2*j]-rstart;
773       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
774     }
775   }
776 
777   if (debug) { /* -----------------------------------  */
778     starts[0] = 0;
779     for (i=1; i<ng; i++) {
780       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
781       else starts[i] = starts[i-1];
782     }
783     for (i=0; i<ng; i++) {
784       if (nownedsenders[i] > 1) {
785         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
786         for (j=0; j<nownedsenders[i]; j++) {
787           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
788         }
789         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
790       }
791     }
792     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
793   } /* -----------------------------------  */
794 
795   /* wait on original sends */
796   if (nsends) {
797     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
798     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
799     ierr = PetscFree(send_status);CHKERRQ(ierr);
800   }
801   ierr = PetscFree(send_waits);CHKERRQ(ierr);
802   ierr = PetscFree(sends);CHKERRQ(ierr);
803   ierr = PetscFree(nprocs);CHKERRQ(ierr);
804 
805   /* pack messages to send back to local owners */
806   starts[0] = 0;
807   for (i=1; i<ng; i++) {
808     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
809     else starts[i] = starts[i-1];
810   }
811   nsends2 = nrecvs;
812   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
813   for (i=0; i<nrecvs; i++) {
814     nprocs[i] = 1;
815     for (j=0; j<len[i]; j++) {
816       node = recvs[2*i*nmax+2*j]-rstart;
817       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
818     }
819   }
820   nt = 0;
821   for (i=0; i<nsends2; i++) nt += nprocs[i];
822 
823   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
824   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
825 
826   starts2[0] = 0;
827   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
828   /*
829      Each message is 1 + nprocs[i] long, and consists of
830        (0) the number of nodes being sent back
831        (1) the local node number,
832        (2) the number of processors sharing it,
833        (3) the processors sharing it
834   */
835   for (i=0; i<nsends2; i++) {
836     cnt = 1;
837     sends2[starts2[i]] = 0;
838     for (j=0; j<len[i]; j++) {
839       node = recvs[2*i*nmax+2*j]-rstart;
840       if (nownedsenders[node] > 1) {
841         sends2[starts2[i]]++;
842         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
843         sends2[starts2[i]+cnt++] = nownedsenders[node];
844         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
845         cnt += nownedsenders[node];
846       }
847     }
848   }
849 
850   /* receive the message lengths */
851   nrecvs2 = nsends;
852   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
853   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
854   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
855   for (i=0; i<nrecvs2; i++) {
856     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
857   }
858 
859   /* send the message lengths */
860   for (i=0; i<nsends2; i++) {
861     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
862   }
863 
864   /* wait on receives of lens */
865   if (nrecvs2) {
866     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
867     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
868     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
869   }
870   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
871 
872   starts3[0] = 0;
873   nt         = 0;
874   for (i=0; i<nrecvs2-1; i++) {
875     starts3[i+1] = starts3[i] + lens2[i];
876     nt          += lens2[i];
877   }
878   if (nrecvs2) nt += lens2[nrecvs2-1];
879 
880   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
881   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
882   for (i=0; i<nrecvs2; i++) {
883     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
884   }
885 
886   /* send the messages */
887   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
888   for (i=0; i<nsends2; i++) {
889     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
890   }
891 
892   /* wait on receives */
893   if (nrecvs2) {
894     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
895     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
896     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
897   }
898   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
899   ierr = PetscFree(nprocs);CHKERRQ(ierr);
900 
901   if (debug) { /* -----------------------------------  */
902     cnt = 0;
903     for (i=0; i<nrecvs2; i++) {
904       nt = recvs2[cnt++];
905       for (j=0; j<nt; j++) {
906         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
907         for (k=0; k<recvs2[cnt+1]; k++) {
908           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
909         }
910         cnt += 2 + recvs2[cnt+1];
911         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
912       }
913     }
914     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
915   } /* -----------------------------------  */
916 
917   /* count number subdomains for each local node */
918   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
919   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
920   cnt  = 0;
921   for (i=0; i<nrecvs2; i++) {
922     nt = recvs2[cnt++];
923     for (j=0; j<nt; j++) {
924       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
925       cnt += 2 + recvs2[cnt+1];
926     }
927   }
928   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
929   *nproc    = nt;
930   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
931   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
932   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
933   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
934   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
935   cnt       = 0;
936   for (i=0; i<size; i++) {
937     if (nprocs[i] > 0) {
938       bprocs[i]        = cnt;
939       (*procs)[cnt]    = i;
940       (*numprocs)[cnt] = nprocs[i];
941       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
942       cnt++;
943     }
944   }
945 
946   /* make the list of subdomains for each nontrivial local node */
947   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
948   cnt  = 0;
949   for (i=0; i<nrecvs2; i++) {
950     nt = recvs2[cnt++];
951     for (j=0; j<nt; j++) {
952       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
953       cnt += 2 + recvs2[cnt+1];
954     }
955   }
956   ierr = PetscFree(bprocs);CHKERRQ(ierr);
957   ierr = PetscFree(recvs2);CHKERRQ(ierr);
958 
959   /* sort the node indexing by their global numbers */
960   nt = *nproc;
961   for (i=0; i<nt; i++) {
962     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
963     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
964     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
965     ierr = PetscFree(tmp);CHKERRQ(ierr);
966   }
967 
968   if (debug) { /* -----------------------------------  */
969     nt = *nproc;
970     for (i=0; i<nt; i++) {
971       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
972       for (j=0; j<(*numprocs)[i]; j++) {
973         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
974       }
975       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
976     }
977     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
978   } /* -----------------------------------  */
979 
980   /* wait on sends */
981   if (nsends2) {
982     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
983     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
984     ierr = PetscFree(send_status);CHKERRQ(ierr);
985   }
986 
987   ierr = PetscFree(starts3);CHKERRQ(ierr);
988   ierr = PetscFree(dest);CHKERRQ(ierr);
989   ierr = PetscFree(send_waits);CHKERRQ(ierr);
990 
991   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
992   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
993   ierr = PetscFree(starts);CHKERRQ(ierr);
994   ierr = PetscFree(starts2);CHKERRQ(ierr);
995   ierr = PetscFree(lens2);CHKERRQ(ierr);
996 
997   ierr = PetscFree(source);CHKERRQ(ierr);
998   ierr = PetscFree(len);CHKERRQ(ierr);
999   ierr = PetscFree(recvs);CHKERRQ(ierr);
1000   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1001   ierr = PetscFree(sends2);CHKERRQ(ierr);
1002 
1003   /* put the information about myself as the first entry in the list */
1004   first_procs    = (*procs)[0];
1005   first_numprocs = (*numprocs)[0];
1006   first_indices  = (*indices)[0];
1007   for (i=0; i<*nproc; i++) {
1008     if ((*procs)[i] == rank) {
1009       (*procs)[0]    = (*procs)[i];
1010       (*numprocs)[0] = (*numprocs)[i];
1011       (*indices)[0]  = (*indices)[i];
1012       (*procs)[i]    = first_procs;
1013       (*numprocs)[i] = first_numprocs;
1014       (*indices)[i]  = first_indices;
1015       break;
1016     }
1017   }
1018   PetscFunctionReturn(0);
1019 }
1020 
1021 #undef __FUNCT__
1022 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1023 /*@C
1024     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1025 
1026     Collective on ISLocalToGlobalMapping
1027 
1028     Input Parameters:
1029 .   mapping - the mapping from local to global indexing
1030 
1031     Output Parameter:
1032 +   nproc - number of processors that are connected to this one
1033 .   proc - neighboring processors
1034 .   numproc - number of indices for each processor
1035 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1036 
1037     Level: advanced
1038 
1039 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1040           ISLocalToGlobalMappingGetInfo()
1041 @*/
1042 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1043 {
1044   PetscErrorCode ierr;
1045   PetscInt       i;
1046 
1047   PetscFunctionBegin;
1048   ierr = PetscFree(*procs);CHKERRQ(ierr);
1049   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1050   if (*indices) {
1051     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1052     for (i=1; i<*nproc; i++) {
1053       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1054     }
1055     ierr = PetscFree(*indices);CHKERRQ(ierr);
1056   }
1057   PetscFunctionReturn(0);
1058 }
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1062 /*@C
1063    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1064 
1065    Not Collective
1066 
1067    Input Arguments:
1068 . ltog - local to global mapping
1069 
1070    Output Arguments:
1071 . array - array of indices
1072 
1073    Level: advanced
1074 
1075 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1076 @*/
/* Returns a read-only view of the mapping's internal local-to-global index array.
   No copy is made, so the caller must not free it and should hand it back with
   ISLocalToGlobalMappingRestoreIndices() when done. */
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out the internal array itself, not a copy */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1085 
1086 #undef __FUNCT__
1087 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1088 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1090 
1091    Not Collective
1092 
1093    Input Arguments:
1094 + ltog - local to global mapping
1095 - array - array of indices
1096 
1097    Level: advanced
1098 
1099 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1100 @*/
/* Returns (invalidates) the array view obtained from ISLocalToGlobalMappingGetIndices().
   Since GetIndices() hands out the internal array itself, any other pointer value
   indicates a caller error, which is reported rather than silently accepted. */
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* null the caller's pointer so a stale view cannot be used after restore */
  *array = NULL;
  PetscFunctionReturn(0);
}
1110 
1111 #undef __FUNCT__
1112 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1113 /*@C
1114    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1115 
1116    Not Collective
1117 
1118    Input Arguments:
1119 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1120 . n - number of mappings to concatenate
1121 - ltogs - local to global mappings
1122 
1123    Output Arguments:
1124 . ltogcat - new mapping
1125 
1126    Level: advanced
1127 
1128 .seealso: ISLocalToGlobalMappingCreate()
1129 @*/
1130 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1131 {
1132   PetscInt       i,cnt,m,*idx;
1133   PetscErrorCode ierr;
1134 
1135   PetscFunctionBegin;
1136   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1137   if (n > 0) PetscValidPointer(ltogs,3);
1138   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1139   PetscValidPointer(ltogcat,4);
1140   for (cnt=0,i=0; i<n; i++) {
1141     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1142     cnt += m;
1143   }
1144   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1145   for (cnt=0,i=0; i<n; i++) {
1146     const PetscInt *subidx;
1147     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1148     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1149     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1150     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1151     cnt += m;
1152   }
1153   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1154   PetscFunctionReturn(0);
1155 }
1156 
1157 
1158