xref: /petsc/src/vec/is/utils/isltog.c (revision 245f004be2b2fca55a62739415aedaade1b4b42e)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
4 PetscClassId IS_LTOGM_CLASSID;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
8 /*@C
9     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
10 
11     Not Collective
12 
13     Input Parameter:
14 .   ltog - local to global mapping
15 
16     Output Parameter:
17 .   n - the number of entries in the local mapping
18 
19     Level: advanced
20 
21     Concepts: mapping^local to global
22 
23 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
24 @*/
25 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
26 {
27   PetscFunctionBegin;
28   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
29   PetscValidIntPointer(n,2);
30   *n = mapping->n;
31   PetscFunctionReturn(0);
32 }
33 
34 #undef __FUNCT__
35 #define __FUNCT__ "ISLocalToGlobalMappingView"
36 /*@C
37     ISLocalToGlobalMappingView - View a local to global mapping
38 
39     Not Collective
40 
41     Input Parameters:
42 +   ltog - local to global mapping
43 -   viewer - viewer used to display the mapping; if NULL, the default ASCII viewer for the mapping's communicator is used
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
52 {
53   PetscInt       i;
54   PetscMPIInt    rank;
55   PetscBool      iascii;
56   PetscErrorCode ierr;
57 
58   PetscFunctionBegin;
59   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
60   if (!viewer) {
61     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
62   }
63   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
64 
65   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
66   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
67   if (iascii) {
68     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
69     for (i=0; i<mapping->n; i++) {
70       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
71     }
72     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
73     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
74   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
75   PetscFunctionReturn(0);
76 }
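/*
   Usage sketch for ISLocalToGlobalMappingView(); the mapping ltog is assumed to exist,
   e.g. created as in the ISLocalToGlobalMappingCreate() sketch further below.

      ierr = ISLocalToGlobalMappingView(ltog,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

   Each process prints one line per local index in the form "[rank] local global";
   passing NULL for the viewer selects the default ASCII viewer on the mapping's
   communicator.
*/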
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
87 .   is - index set containing the global numbers for each local number
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   PetscInt       n;
102   const PetscInt *indices;
103   MPI_Comm       comm;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(is,IS_CLASSID,1);
107   PetscValidPointer(mapping,2);
108 
109   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
110   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
111   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
112   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
113   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
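/*
   Usage sketch for ISLocalToGlobalMappingCreateIS() (hypothetical index values; a
   PetscErrorCode ierr is assumed to be in scope):

      IS                     is;
      PetscInt               gidx[4] = {10,11,12,25};
      ISLocalToGlobalMapping ltog;

      ierr = ISCreateGeneral(PETSC_COMM_WORLD,4,gidx,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);

   The mapping copies the indices (the call above uses PETSC_COPY_VALUES), so the IS
   can be destroyed immediately after the mapping is created.
*/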
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
119 /*@C
120     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
121     ordering and a global parallel ordering.
122 
123     Collective
124 
125     Input Parameters:
126 +   sf - star forest mapping contiguous local indices to (rank, offset)
127 -   start - first global index on this process
128 
129     Output Parameter:
130 .   mapping - new mapping data structure
131 
132     Level: advanced
133 
134     Concepts: mapping^local to global
135 
136 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
137 @*/
138 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
139 {
140   PetscErrorCode ierr;
141   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
142   const PetscInt *ilocal;
143   MPI_Comm       comm;
144 
145   PetscFunctionBegin;
146   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
147   PetscValidPointer(mapping,3);
148 
149   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
150   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
151   if (ilocal) {
152     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
153   }
154   else maxlocal = nleaves;
155   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
156   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
157   for (i=0; i<nroots; i++) globals[i] = start + i;
158   for (i=0; i<maxlocal; i++) ltog[i] = -1;
159   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
160   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
161   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
162   ierr = PetscFree(globals);CHKERRQ(ierr);
163   PetscFunctionReturn(0);
164 }
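/*
   Usage sketch for ISLocalToGlobalMappingCreateSF(). The star forest below is made up
   for illustration: each process owns one root (so "start" is simply the rank) and has
   two leaves, one referencing its own root and one referencing the root on the next rank.

      PetscMPIInt            rank,size;
      PetscSF                sf;
      PetscSFNode            remote[2];
      ISLocalToGlobalMapping ltog;

      ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
      ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
      remote[0].rank = rank;            remote[0].index = 0;
      remote[1].rank = (rank+1)%size;   remote[1].index = 0;
      ierr = PetscSFCreate(PETSC_COMM_WORLD,&sf);CHKERRQ(ierr);
      ierr = PetscSFSetGraph(sf,1,2,NULL,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateSF(sf,rank,&ltog);CHKERRQ(ierr);
      ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);

   The resulting mapping on each process is {rank, (rank+1) mod size}.
*/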
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
168 /*@
169     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
170     ordering and a global parallel ordering.
171 
172     Not Collective, but communicator may have more than one process
173 
174     Input Parameters:
175 +   comm - MPI communicator
176 .   n - the number of local elements
177 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
178 -   mode - see PetscCopyMode
179 
180     Output Parameter:
181 .   mapping - new mapping data structure
182 
183     Level: advanced
184 
185     Concepts: mapping^local to global
186 
187 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
188 @*/
189 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
190 {
191   PetscErrorCode ierr;
192   PetscInt       *in;
193 
194   PetscFunctionBegin;
195   if (n) PetscValidIntPointer(indices,3);
196   PetscValidPointer(mapping,4);
197 
198   *mapping = NULL;
199 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
200   ierr = ISInitializePackage(NULL);CHKERRQ(ierr);
201 #endif
202 
203   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
204                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
205   (*mapping)->n = n;
206   /*
207     Do not create the global to local mapping. This is only created if
208     ISGlobalToLocalMapping() is called
209   */
210   (*mapping)->globals = 0;
211   if (mode == PETSC_COPY_VALUES) {
212     ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
213     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
214     ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
215     (*mapping)->indices = in;
216   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
217   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
218   PetscFunctionReturn(0);
219 }
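/*
   Usage sketch for ISLocalToGlobalMappingCreate() and ISLocalToGlobalMappingGetSize().
   The index values are illustrative; a PetscErrorCode ierr is assumed to be in scope.

      PetscInt               gidx[4] = {3,7,1,4},nlocal;
      ISLocalToGlobalMapping ltog;

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,4,gidx,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetSize(ltog,&nlocal);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);

   Here nlocal comes back as 4. With PETSC_COPY_VALUES the caller keeps ownership of
   gidx; with PETSC_OWN_POINTER the array must have been obtained with PetscMalloc()
   and is freed by ISLocalToGlobalMappingDestroy(); PETSC_USE_POINTER is rejected above.
*/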
220 
221 #undef __FUNCT__
222 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
223 /*@
224     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
225        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
226        and VecSetLocalToGlobalMappingBlock().
227 
228     Not Collective, but communicator may have more than one process
229 
230     Input Parameters:
231 +    inmap - original point-wise mapping
232 -    bs - block size
233 
234     Output Parameter:
235 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
236 
237     Level: advanced
238 
239     Concepts: mapping^local to global
240 
241 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
242 @*/
243 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
244 {
245   PetscErrorCode ierr;
246   PetscInt       *ii,i,n;
247 
248   PetscFunctionBegin;
249   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
250   PetscValidPointer(outmap,3);
251   if (bs > 1) {
252     n = inmap->n/bs;
253     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
254     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
255     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
256     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
257   } else {
258     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
259     *outmap = inmap;
260   }
261   PetscFunctionReturn(0);
262 }
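/*
   Usage sketch for ISLocalToGlobalMappingBlock() (illustrative values): a point-wise
   mapping of length 4 with block size 2 becomes a mapping between block indices.

      PetscInt               pidx[4] = {4,5,10,11};
      ISLocalToGlobalMapping ltog,bltog;

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,4,pidx,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingBlock(ltog,2,&bltog);CHKERRQ(ierr);

   bltog then maps the two local blocks to global blocks {2,5}. Both mappings must be
   destroyed with ISLocalToGlobalMappingDestroy() when no longer needed.
*/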
263 
264 #undef __FUNCT__
265 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
266 /*@
267     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
268        ISLocalToGlobalMapping
269 
270     Not Collective, but communicator may have more than one process
271 
272     Input Parameters:
273 +    inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
274 -    bs - block size
275 
276     Output Parameter:
277 .   outmap - pointwise mapping
278 
279     Level: advanced
280 
281     Concepts: mapping^local to global
282 
283 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
284 @*/
285 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
286 {
287   PetscErrorCode ierr;
288   PetscInt       *ii,i,n;
289 
290   PetscFunctionBegin;
291   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
292   PetscValidPointer(outmap,3);
293   if (bs > 1) {
294     n    = inmap->n*bs;
295     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
296     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
297     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
298   } else {
299     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
300     *outmap = inmap;
301   }
302   PetscFunctionReturn(0);
303 }
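/*
   Usage sketch for ISLocalToGlobalMappingUnBlock(), continuing the
   ISLocalToGlobalMappingBlock() sketch above: expanding the block mapping {2,5} with
   bs = 2 recovers the point-wise mapping {4,5,10,11}.

      ISLocalToGlobalMapping ptmap;

      ierr = ISLocalToGlobalMappingUnBlock(bltog,2,&ptmap);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&ptmap);CHKERRQ(ierr);
*/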
304 
305 #undef __FUNCT__
306 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
307 /*@
308    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
309    ordering and a global parallel ordering.
310 
311    Not Collective
312 
313    Input Parameter:
314 .  mapping - mapping data structure
315 
316    Level: advanced
317 
318 .seealso: ISLocalToGlobalMappingCreate()
319 @*/
320 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
321 {
322   PetscErrorCode ierr;
323 
324   PetscFunctionBegin;
325   if (!*mapping) PetscFunctionReturn(0);
326   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
327   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
328   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
329   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
330   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
331   *mapping = 0;
332   PetscFunctionReturn(0);
333 }
334 
335 #undef __FUNCT__
336 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
337 /*@
338     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
339     a new index set using the global numbering defined in an ISLocalToGlobalMapping
340     context.
341 
342     Not collective
343 
344     Input Parameters:
345 +   mapping - mapping between local and global numbering
346 -   is - index set in local numbering
347 
348     Output Parameter:
349 .   newis - index set in global numbering
350 
351     Level: advanced
352 
353     Concepts: mapping^local to global
354 
355 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
356           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
357 @*/
358 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
359 {
360   PetscErrorCode ierr;
361   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
362   const PetscInt *idxin;
363 
364   PetscFunctionBegin;
365   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
366   PetscValidHeaderSpecific(is,IS_CLASSID,2);
367   PetscValidPointer(newis,3);
368 
369   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
370   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
371   idxmap = mapping->indices;
372 
373   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
374   for (i=0; i<n; i++) {
375     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
376     idxout[i] = idxmap[idxin[i]];
377   }
378   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
379   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
380   PetscFunctionReturn(0);
381 }
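/*
   Usage sketch for ISLocalToGlobalMappingApplyIS(), assuming ltog is the {3,7,1,4}
   mapping from the ISLocalToGlobalMappingCreate() sketch above:

      PetscInt lidx[3] = {0,2,3};
      IS       is,gis;

      ierr = ISCreateGeneral(PETSC_COMM_SELF,3,lidx,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(ltog,is,&gis);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);

   gis then contains the global indices {3,1,4}.
*/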
382 
383 #undef __FUNCT__
384 #define __FUNCT__ "ISLocalToGlobalMappingApply"
385 /*@C
386    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
387    and converts them to the global numbering.
388 
389    Not collective
390 
391    Input Parameters:
392 +  mapping - the local to global mapping context
393 .  N - number of integers
394 -  in - input indices in local numbering
395 
396    Output Parameter:
397 .  out - indices in global numbering
398 
399    Notes:
400    The in and out array parameters may be identical.
401 
402    Level: advanced
403 
404 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
405           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
406           AOPetscToApplication(), ISGlobalToLocalMappingApply()
407 
408     Concepts: mapping^local to global
409 @*/
410 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
411 {
412   PetscInt       i,Nmax = mapping->n;
413   const PetscInt *idx = mapping->indices;
414 
415   PetscFunctionBegin;
416   for (i=0; i<N; i++) {
417     if (in[i] < 0) {
418       out[i] = in[i];
419       continue;
420     }
421     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
422     out[i] = idx[in[i]];
423   }
424   PetscFunctionReturn(0);
425 }
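/*
   Usage sketch for ISLocalToGlobalMappingApply(), again assuming ltog is the {3,7,1,4}
   mapping from the ISLocalToGlobalMappingCreate() sketch above:

      PetscInt lidx[3] = {0,2,-1},gout[3];

      ierr = ISLocalToGlobalMappingApply(ltog,3,lidx,gout);CHKERRQ(ierr);

   gout becomes {3,1,-1}: valid local indices are translated and negative entries are
   passed through unchanged, as in the loop above.
*/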
426 
427 /* -----------------------------------------------------------------------------------------*/
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
431 /*
432     Creates the global fields in the ISLocalToGlobalMapping structure
433 */
434 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
435 {
436   PetscErrorCode ierr;
437   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
438 
439   PetscFunctionBegin;
440   end   = 0;
441   start = PETSC_MAX_INT;
442 
443   for (i=0; i<n; i++) {
444     if (idx[i] < 0) continue;
445     if (idx[i] < start) start = idx[i];
446     if (idx[i] > end)   end   = idx[i];
447   }
448   if (start > end) {start = 0; end = -1;}
449   mapping->globalstart = start;
450   mapping->globalend   = end;
451 
452   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
453   mapping->globals = globals;
454   for (i=0; i<end-start+1; i++) globals[i] = -1;
455   for (i=0; i<n; i++) {
456     if (idx[i] < 0) continue;
457     globals[idx[i] - start] = i;
458   }
459 
460   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
461   PetscFunctionReturn(0);
462 }
463 
464 #undef __FUNCT__
465 #define __FUNCT__ "ISGlobalToLocalMappingApply"
466 /*@
467     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
468     specified with a global numbering.
469 
470     Not collective
471 
472     Input Parameters:
473 +   mapping - mapping between local and global numbering
474 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
475            IS_GTOLM_DROP - drops the indices with no local value from the output list
476 .   n - number of global indices to map
477 -   idx - global indices to map
478 
479     Output Parameters:
480 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
481 -   idxout - local index of each global index, one must pass in an array long enough
482              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
483              idxout == NULL to determine the required length (returned in nout)
484              and then allocate the required space and call ISGlobalToLocalMappingApply()
485              a second time to set the values.
486 
487     Notes:
488     Either nout or idxout may be NULL. idx and idxout may be identical.
489 
490     This is not scalable in memory usage. Each processor requires O(Nglobal) size
491     array to compute these.
492 
493     Level: advanced
494 
495     Developer Note: The manual page states that idx and idxout may be identical but the calling
496        sequence declares idx as const so it cannot be the same as idxout.
497 
498     Concepts: mapping^global to local
499 
500 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
501           ISLocalToGlobalMappingDestroy()
502 @*/
503 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
504                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
505 {
506   PetscInt       i,*globals,nf = 0,tmp,start,end;
507   PetscErrorCode ierr;
508 
509   PetscFunctionBegin;
510   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
511   if (!mapping->globals) {
512     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
513   }
514   globals = mapping->globals;
515   start   = mapping->globalstart;
516   end     = mapping->globalend;
517 
518   if (type == IS_GTOLM_MASK) {
519     if (idxout) {
520       for (i=0; i<n; i++) {
521         if (idx[i] < 0) idxout[i] = idx[i];
522         else if (idx[i] < start) idxout[i] = -1;
523         else if (idx[i] > end)   idxout[i] = -1;
524         else                     idxout[i] = globals[idx[i] - start];
525       }
526     }
527     if (nout) *nout = n;
528   } else {
529     if (idxout) {
530       for (i=0; i<n; i++) {
531         if (idx[i] < 0) continue;
532         if (idx[i] < start) continue;
533         if (idx[i] > end) continue;
534         tmp = globals[idx[i] - start];
535         if (tmp < 0) continue;
536         idxout[nf++] = tmp;
537       }
538     } else {
539       for (i=0; i<n; i++) {
540         if (idx[i] < 0) continue;
541         if (idx[i] < start) continue;
542         if (idx[i] > end) continue;
543         tmp = globals[idx[i] - start];
544         if (tmp < 0) continue;
545         nf++;
546       }
547     }
548     if (nout) *nout = nf;
549   }
550   PetscFunctionReturn(0);
551 }
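/*
   Usage sketch for ISGlobalToLocalMappingApply() using the two-pass IS_GTOLM_DROP
   pattern described in the manual page above, assuming ltog is the {3,7,1,4} mapping
   from the ISLocalToGlobalMappingCreate() sketch:

      PetscInt gidx[3] = {7,2,3},nout,*lidx;

      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,&nout,NULL);CHKERRQ(ierr);
      ierr = PetscMalloc(nout*sizeof(PetscInt),&lidx);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,NULL,lidx);CHKERRQ(ierr);
      ierr = PetscFree(lidx);CHKERRQ(ierr);

   Global indices 7 and 3 have local values 1 and 0, while 2 has none, so nout is 2 and
   lidx holds {1,0}. With IS_GTOLM_MASK the output has length 3 and the missing entry
   is set to -1.
*/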
552 
553 #undef __FUNCT__
554 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
555 /*@C
556     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
557      each index shared by more than one processor
558 
559     Collective on ISLocalToGlobalMapping
560 
561     Input Parameter:
562 .   mapping - the mapping from local to global indexing
563 
564     Output Parameters:
565 +   nproc - number of processors that are connected to this one
566 .   procs - neighboring processors
567 .   numprocs - number of indices for each subdomain (processor)
568 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
569 
570     Level: advanced
571 
572     Concepts: mapping^local to global
573 
574     Fortran Usage:
575 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
576 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
577 $          PetscInt indices[nproc][numprocmax],ierr)
578         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
579         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
580 
581 
582 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
583           ISLocalToGlobalMappingRestoreInfo()
584 @*/
585 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
586 {
587   PetscErrorCode ierr;
588   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
589   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
590   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
591   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
592   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
593   PetscInt       first_procs,first_numprocs,*first_indices;
594   MPI_Request    *recv_waits,*send_waits;
595   MPI_Status     recv_status,*send_status,*recv_statuses;
596   MPI_Comm       comm  = ((PetscObject)mapping)->comm;
597   PetscBool      debug = PETSC_FALSE;
598 
599   PetscFunctionBegin;
600   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
601   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
602   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
603   if (size == 1) {
604     *nproc         = 0;
605     *procs         = NULL;
606     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
607     (*numprocs)[0] = 0;
608     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
609     (*indices)[0]  = NULL;
610     PetscFunctionReturn(0);
611   }
612 
613   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
614 
615   /*
616     Notes on ISLocalToGlobalMappingGetInfo
617 
618     globally owned node - node that has been assigned to this processor in the global
619            numbering, just for this routine.
620 
621     nontrivial globally owned node - node assigned to this processor that is on a subdomain
622            boundary (i.e. it has more than one local owner)
623 
624     locally owned node - node that exists on this processor's subdomain
625 
626     nontrivial locally owned node - node that is not in the interior (i.e. it is shared by
627            more than one local subdomain)
628   */
629   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
630   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
631   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
632 
633   for (i=0; i<n; i++) {
634     if (lindices[i] > max) max = lindices[i];
635   }
636   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
637   Ng++;
638   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
639   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
640   scale  = Ng/size + 1;
641   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
642   rstart = scale*rank;
643 
644   /* determine ownership ranges of global indices */
645   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
646   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
647 
648   /* determine owners of each local node  */
649   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
650   for (i=0; i<n; i++) {
651     proc             = lindices[i]/scale; /* processor that globally owns this index */
652     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
653     owner[i]         = proc;
654     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
655   }
656   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
657   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
658 
659   /* inform other processors of number of messages and max length*/
660   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
661   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
662 
663   /* post receives for owned rows */
664   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
665   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
666   for (i=0; i<nrecvs; i++) {
667     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
668   }
669 
670   /* pack messages containing lists of local nodes to owners */
671   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
672   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
673   starts[0] = 0;
674   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
675   for (i=0; i<n; i++) {
676     sends[starts[owner[i]]++] = lindices[i];
677     sends[starts[owner[i]]++] = i;
678   }
679   ierr = PetscFree(owner);CHKERRQ(ierr);
680   starts[0] = 0;
681   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
682 
683   /* send the messages */
684   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
685   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
686   cnt = 0;
687   for (i=0; i<size; i++) {
688     if (nprocs[2*i]) {
689       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
690       dest[cnt] = i;
691       cnt++;
692     }
693   }
694   ierr = PetscFree(starts);CHKERRQ(ierr);
695 
696   /* wait on receives */
697   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
698   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
699   cnt  = nrecvs;
700   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
701   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
702   while (cnt) {
703     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
704     /* unpack receives into our local space */
705     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
706     source[imdex] = recv_status.MPI_SOURCE;
707     len[imdex]    = len[imdex]/2;
708     /* count how many local owners for each of my global owned indices */
709     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
710     cnt--;
711   }
712   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
713 
714   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
715   nowned  = 0;
716   nownedm = 0;
717   for (i=0; i<ng; i++) {
718     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
719   }
720 
721   /* create single array to contain rank of all local owners of each globally owned index */
722   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
723   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
724   starts[0] = 0;
725   for (i=1; i<ng; i++) {
726     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
727     else starts[i] = starts[i-1];
728   }
729 
730   /* for each nontrivial globally owned node list all arriving processors */
731   for (i=0; i<nrecvs; i++) {
732     for (j=0; j<len[i]; j++) {
733       node = recvs[2*i*nmax+2*j]-rstart;
734       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
735     }
736   }
737 
738   if (debug) { /* -----------------------------------  */
739     starts[0] = 0;
740     for (i=1; i<ng; i++) {
741       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
742       else starts[i] = starts[i-1];
743     }
744     for (i=0; i<ng; i++) {
745       if (nownedsenders[i] > 1) {
746         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
747         for (j=0; j<nownedsenders[i]; j++) {
748           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
749         }
750         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
751       }
752     }
753     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
754   } /* -----------------------------------  */
755 
756   /* wait on original sends */
757   if (nsends) {
758     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
759     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
760     ierr = PetscFree(send_status);CHKERRQ(ierr);
761   }
762   ierr = PetscFree(send_waits);CHKERRQ(ierr);
763   ierr = PetscFree(sends);CHKERRQ(ierr);
764   ierr = PetscFree(nprocs);CHKERRQ(ierr);
765 
766   /* pack messages to send back to local owners */
767   starts[0] = 0;
768   for (i=1; i<ng; i++) {
769     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
770     else starts[i] = starts[i-1];
771   }
772   nsends2 = nrecvs;
773   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
774   for (i=0; i<nrecvs; i++) {
775     nprocs[i] = 1;
776     for (j=0; j<len[i]; j++) {
777       node = recvs[2*i*nmax+2*j]-rstart;
778       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
779     }
780   }
781   nt = 0;
782   for (i=0; i<nsends2; i++) nt += nprocs[i];
783 
784   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
785   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
786 
787   starts2[0] = 0;
788   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
789   /*
790      Each message is 1 + nprocs[i] long, and consists of
791        (0) the number of nodes being sent back
792        (1) the local node number,
793        (2) the number of processors sharing it,
794        (3) the processors sharing it
795   */
796   for (i=0; i<nsends2; i++) {
797     cnt = 1;
798     sends2[starts2[i]] = 0;
799     for (j=0; j<len[i]; j++) {
800       node = recvs[2*i*nmax+2*j]-rstart;
801       if (nownedsenders[node] > 1) {
802         sends2[starts2[i]]++;
803         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
804         sends2[starts2[i]+cnt++] = nownedsenders[node];
805         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
806         cnt += nownedsenders[node];
807       }
808     }
809   }
810 
811   /* receive the message lengths */
812   nrecvs2 = nsends;
813   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
814   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
815   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
816   for (i=0; i<nrecvs2; i++) {
817     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
818   }
819 
820   /* send the message lengths */
821   for (i=0; i<nsends2; i++) {
822     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
823   }
824 
825   /* wait on receives of lens */
826   if (nrecvs2) {
827     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
828     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
829     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
830   }
831   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
832 
833   starts3[0] = 0;
834   nt         = 0;
835   for (i=0; i<nrecvs2-1; i++) {
836     starts3[i+1] = starts3[i] + lens2[i];
837     nt          += lens2[i];
838   }
839   if (nrecvs2) nt += lens2[nrecvs2-1];
840 
841   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
842   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
843   for (i=0; i<nrecvs2; i++) {
844     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
845   }
846 
847   /* send the messages */
848   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
849   for (i=0; i<nsends2; i++) {
850     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
851   }
852 
853   /* wait on receives */
854   if (nrecvs2) {
855     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
856     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
857     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
858   }
859   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
860   ierr = PetscFree(nprocs);CHKERRQ(ierr);
861 
862   if (debug) { /* -----------------------------------  */
863     cnt = 0;
864     for (i=0; i<nrecvs2; i++) {
865       nt = recvs2[cnt++];
866       for (j=0; j<nt; j++) {
867         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
868         for (k=0; k<recvs2[cnt+1]; k++) {
869           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
870         }
871         cnt += 2 + recvs2[cnt+1];
872         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
873       }
874     }
875     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
876   } /* -----------------------------------  */
877 
878   /* count the number of subdomains for each local node */
879   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
880   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
881   cnt  = 0;
882   for (i=0; i<nrecvs2; i++) {
883     nt = recvs2[cnt++];
884     for (j=0; j<nt; j++) {
885       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
886       cnt += 2 + recvs2[cnt+1];
887     }
888   }
889   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
890   *nproc    = nt;
891   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
892   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
893   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
894   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
895   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
896   cnt       = 0;
897   for (i=0; i<size; i++) {
898     if (nprocs[i] > 0) {
899       bprocs[i]        = cnt;
900       (*procs)[cnt]    = i;
901       (*numprocs)[cnt] = nprocs[i];
902       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
903       cnt++;
904     }
905   }
906 
907   /* make the list of subdomains for each nontrivial local node */
908   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
909   cnt  = 0;
910   for (i=0; i<nrecvs2; i++) {
911     nt = recvs2[cnt++];
912     for (j=0; j<nt; j++) {
913       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
914       cnt += 2 + recvs2[cnt+1];
915     }
916   }
917   ierr = PetscFree(bprocs);CHKERRQ(ierr);
918   ierr = PetscFree(recvs2);CHKERRQ(ierr);
919 
920   /* sort the node indexing by their global numbers */
921   nt = *nproc;
922   for (i=0; i<nt; i++) {
923     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
924     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
925     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
926     ierr = PetscFree(tmp);CHKERRQ(ierr);
927   }
928 
929   if (debug) { /* -----------------------------------  */
930     nt = *nproc;
931     for (i=0; i<nt; i++) {
932       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
933       for (j=0; j<(*numprocs)[i]; j++) {
934         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
935       }
936       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
937     }
938     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
939   } /* -----------------------------------  */
940 
941   /* wait on sends */
942   if (nsends2) {
943     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
944     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
945     ierr = PetscFree(send_status);CHKERRQ(ierr);
946   }
947 
948   ierr = PetscFree(starts3);CHKERRQ(ierr);
949   ierr = PetscFree(dest);CHKERRQ(ierr);
950   ierr = PetscFree(send_waits);CHKERRQ(ierr);
951 
952   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
953   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
954   ierr = PetscFree(starts);CHKERRQ(ierr);
955   ierr = PetscFree(starts2);CHKERRQ(ierr);
956   ierr = PetscFree(lens2);CHKERRQ(ierr);
957 
958   ierr = PetscFree(source);CHKERRQ(ierr);
959   ierr = PetscFree(len);CHKERRQ(ierr);
960   ierr = PetscFree(recvs);CHKERRQ(ierr);
961   ierr = PetscFree(nprocs);CHKERRQ(ierr);
962   ierr = PetscFree(sends2);CHKERRQ(ierr);
963 
964   /* put the information about myself as the first entry in the list */
965   first_procs    = (*procs)[0];
966   first_numprocs = (*numprocs)[0];
967   first_indices  = (*indices)[0];
968   for (i=0; i<*nproc; i++) {
969     if ((*procs)[i] == rank) {
970       (*procs)[0]    = (*procs)[i];
971       (*numprocs)[0] = (*numprocs)[i];
972       (*indices)[0]  = (*indices)[i];
973       (*procs)[i]    = first_procs;
974       (*numprocs)[i] = first_numprocs;
975       (*indices)[i]  = first_indices;
976       break;
977     }
978   }
979   PetscFunctionReturn(0);
980 }
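/*
   Usage sketch for ISLocalToGlobalMappingGetInfo()/ISLocalToGlobalMappingRestoreInfo(),
   assuming an existing parallel mapping ltog; the print statement is only meant to show
   how the returned arrays are laid out.

      PetscInt p,j,nproc,*procs,*numprocs,**shared;

      ierr = ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&shared);CHKERRQ(ierr);
      for (p=0; p<nproc; p++) {
        for (j=0; j<numprocs[p]; j++) {
          ierr = PetscPrintf(PETSC_COMM_SELF,"local index %D is shared with process %D\n",shared[p][j],procs[p]);CHKERRQ(ierr);
        }
      }
      ierr = ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&shared);CHKERRQ(ierr);

   The routine above places the entry describing this process itself first in procs[],
   numprocs[] and the index lists, when it is present.
*/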
981 
982 #undef __FUNCT__
983 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
984 /*@C
985     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
986 
987     Collective on ISLocalToGlobalMapping
988 
989     Input Parameter:
990 .   mapping - the mapping from local to global indexing
991 
992     Output Parameters:
993 +   nproc - number of processors that are connected to this one
994 .   procs - neighboring processors
995 .   numprocs - number of indices for each processor
996 -   indices - indices of local nodes shared with neighbors (sorted by global numbering)
997 
998     Level: advanced
999 
1000 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1001           ISLocalToGlobalMappingGetInfo()
1002 @*/
1003 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1004 {
1005   PetscErrorCode ierr;
1006   PetscInt       i;
1007 
1008   PetscFunctionBegin;
1009   ierr = PetscFree(*procs);CHKERRQ(ierr);
1010   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1011   if (*indices) {
1012     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1013     for (i=1; i<*nproc; i++) {
1014       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1015     }
1016     ierr = PetscFree(*indices);CHKERRQ(ierr);
1017   }
1018   PetscFunctionReturn(0);
1019 }
1020 
1021 #undef __FUNCT__
1022 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1023 /*@C
1024    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1025 
1026    Not Collective
1027 
1028    Input Arguments:
1029 . ltog - local to global mapping
1030 
1031    Output Arguments:
1032 . array - array of indices
1033 
1034    Level: advanced
1035 
1036 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1037 @*/
1038 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1039 {
1040   PetscFunctionBegin;
1041   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1042   PetscValidPointer(array,2);
1043   *array = ltog->indices;
1044   PetscFunctionReturn(0);
1045 }
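/*
   Usage sketch for ISLocalToGlobalMappingGetIndices()/ISLocalToGlobalMappingRestoreIndices():
   read-only access to the raw index array without copying it (ltog assumed to exist).

      const PetscInt *idx;
      PetscInt       nlocal,i,gmax = -1;

      ierr = ISLocalToGlobalMappingGetSize(ltog,&nlocal);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetIndices(ltog,&idx);CHKERRQ(ierr);
      for (i=0; i<nlocal; i++) gmax = PetscMax(gmax,idx[i]);
      ierr = ISLocalToGlobalMappingRestoreIndices(ltog,&idx);CHKERRQ(ierr);

   The restore call must receive the same pointer that was handed out, as checked below.
*/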
1046 
1047 #undef __FUNCT__
1048 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1049 /*@C
1050    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1051 
1052    Not Collective
1053 
1054    Input Arguments:
1055 + ltog - local to global mapping
1056 - array - array of indices
1057 
1058    Level: advanced
1059 
1060 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1061 @*/
1062 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1063 {
1064   PetscFunctionBegin;
1065   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1066   PetscValidPointer(array,2);
1067   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1068   *array = NULL;
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 #undef __FUNCT__
1073 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1074 /*@C
1075    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1076 
1077    Not Collective
1078 
1079    Input Arguments:
1080 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1081 . n - number of mappings to concatenate
1082 - ltogs - local to global mappings
1083 
1084    Output Arguments:
1085 . ltogcat - new mapping
1086 
1087    Level: advanced
1088 
1089 .seealso: ISLocalToGlobalMappingCreate()
1090 @*/
1091 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1092 {
1093   PetscInt       i,cnt,m,*idx;
1094   PetscErrorCode ierr;
1095 
1096   PetscFunctionBegin;
1097   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1098   if (n > 0) PetscValidPointer(ltogs,3);
1099   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1100   PetscValidPointer(ltogcat,4);
1101   for (cnt=0,i=0; i<n; i++) {
1102     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1103     cnt += m;
1104   }
1105   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1106   for (cnt=0,i=0; i<n; i++) {
1107     const PetscInt *subidx;
1108     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1109     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1110     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1111     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1112     cnt += m;
1113   }
1114   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1115   PetscFunctionReturn(0);
1116 }
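/*
   Usage sketch for ISLocalToGlobalMappingConcatenate() (illustrative values):
   concatenating mappings with indices {0,1,2} and {10,11} yields a mapping with
   indices {0,1,2,10,11}, i.e. the local numbering of the second mapping is appended
   after the first.

      PetscInt               idxa[3] = {0,1,2},idxb[2] = {10,11};
      ISLocalToGlobalMapping parts[2],cat;

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,3,idxa,PETSC_COPY_VALUES,&parts[0]);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,idxb,PETSC_COPY_VALUES,&parts[1]);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF,2,parts,&cat);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cat);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&parts[1]);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&parts[0]);CHKERRQ(ierr);
*/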
1117