xref: /petsc/src/vec/is/utils/isltog.c (revision ce94432eddcd14845bc7e8083b7f8ea723b9bf7d)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
4 PetscClassId IS_LTOGM_CLASSID;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
8 /*@C
9     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
10 
11     Not Collective
12 
13     Input Parameter:
14 .   ltog - local to global mapping
15 
16     Output Parameter:
17 .   n - the number of entries in the local mapping
18 
19     Level: advanced
20 
21     Concepts: mapping^local to global
22 
23 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
24 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;  /* number of local entries stored in the mapping */
  PetscFunctionReturn(0);
}
33 
34 #undef __FUNCT__
35 #define __FUNCT__ "ISLocalToGlobalMappingView"
36 /*@C
37     ISLocalToGlobalMappingView - View a local to global mapping
38 
39     Not Collective
40 
41     Input Parameters:
42 +   ltog - local to global mapping
43 -   viewer - viewer
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
52 {
53   PetscInt       i;
54   PetscMPIInt    rank;
55   PetscBool      iascii;
56   PetscErrorCode ierr;
57 
58   PetscFunctionBegin;
59   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
60   if (!viewer) {
61     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
62   }
63   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
64 
65   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
66   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
67   if (iascii) {
68     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
69     for (i=0; i<mapping->n; i++) {
70       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
71     }
72     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
73     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
74   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
87 .   is - index set containing the global numbers for each local number
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   PetscInt       n;
102   const PetscInt *indices;
103   MPI_Comm       comm;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(is,IS_CLASSID,1);
107   PetscValidPointer(mapping,2);
108 
109   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
110   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
111   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
112   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
113   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
119 /*@C
120     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
121     ordering and a global parallel ordering.
122 
123     Collective
124 
125     Input Parameters:
126 +   sf - star forest mapping contiguous local indices to (rank, offset)
127 -   start - first global index on this process
128 
129     Output Parameter:
130 .   mapping - new mapping data structure
131 
132     Level: advanced
133 
134     Concepts: mapping^local to global
135 
136 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
137 @*/
138 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
139 {
140   PetscErrorCode ierr;
141   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
142   const PetscInt *ilocal;
143   MPI_Comm       comm;
144 
145   PetscFunctionBegin;
146   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
147   PetscValidPointer(mapping,3);
148 
149   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
150   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
151   if (ilocal) {
152     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
153   }
154   else maxlocal = nleaves;
155   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
156   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
157   for (i=0; i<nroots; i++) globals[i] = start + i;
158   for (i=0; i<maxlocal; i++) ltog[i] = -1;
159   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
160   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
161   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
162   ierr = PetscFree(globals);CHKERRQ(ierr);
163   PetscFunctionReturn(0);
164 }
165 
166 #undef __FUNCT__
167 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
168 /*@
169     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
170     ordering and a global parallel ordering.
171 
172     Not Collective, but communicator may have more than one process
173 
174     Input Parameters:
175 +   comm - MPI communicator
176 .   n - the number of local elements
177 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
178 -   mode - see PetscCopyMode
179 
180     Output Parameter:
181 .   mapping - new mapping data structure
182 
183     Level: advanced
184 
185     Concepts: mapping^local to global
186 
187 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
188 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);  /* indices may be NULL only when there are no local entries */
  PetscValidPointer(mapping,4);

  *mapping = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = ISInitializePackage(NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy so the caller's array may be freed or modified */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;  /* mapping now owns the array and frees it on destroy */
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
220 
221 #undef __FUNCT__
222 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
223 /*@
224     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
225        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
226        and VecSetLocalToGlobalMappingBlock().
227 
228     Not Collective, but communicator may have more than one process
229 
230     Input Parameters:
231 +    inmap - original point-wise mapping
232 -    bs - block size
233 
234     Output Parameter:
235 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
236 
237     Level: advanced
238 
239     Concepts: mapping^local to global
240 
241 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
242 @*/
243 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
244 {
245   PetscErrorCode ierr;
246   PetscInt       *ii,i,n;
247 
248   PetscFunctionBegin;
249   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
250   PetscValidPointer(outmap,3);
251   if (bs > 1) {
252     n = inmap->n/bs;
253     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
254     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
255     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
256     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
257   } else {
258     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
259     *outmap = inmap;
260   }
261   PetscFunctionReturn(0);
262 }
263 
264 #undef __FUNCT__
265 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
266 /*@
267     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
268        ISLocalToGlobalMapping
269 
270     Not Collective, but communicator may have more than one process
271 
272     Input Parameters:
273 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
274 - bs - block size
275 
276     Output Parameter:
277 .   outmap - pointwise mapping
278 
279     Level: advanced
280 
281     Concepts: mapping^local to global
282 
283 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
284 @*/
285 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
286 {
287   PetscErrorCode ierr;
288   PetscInt       *ii,i,n;
289 
290   PetscFunctionBegin;
291   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
292   PetscValidPointer(outmap,2);
293   if (bs > 1) {
294     n    = inmap->n*bs;
295     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
296     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
297     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
298   } else {
299     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
300     *outmap = inmap;
301   }
302   PetscFunctionReturn(0);
303 }
304 
305 #undef __FUNCT__
306 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
307 /*@
308    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
309    ordering and a global parallel ordering.
310 
311    Not Collective
312 
313    Input Parameter:
314 .  mapping - mapping data structure
315 
316    Level: advanced
317 
318 .seealso: ISLocalToGlobalMappingCreate()
319 @*/
320 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
321 {
322   PetscErrorCode ierr;
323 
324   PetscFunctionBegin;
325   if (!*mapping) PetscFunctionReturn(0);
326   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
327   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
328   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
329   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
330   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
331   *mapping = 0;
332   PetscFunctionReturn(0);
333 }
334 
335 #undef __FUNCT__
336 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
337 /*@
338     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
339     a new index set using the global numbering defined in an ISLocalToGlobalMapping
340     context.
341 
342     Not collective
343 
344     Input Parameters:
345 +   mapping - mapping between local and global numbering
346 -   is - index set in local numbering
347 
348     Output Parameters:
349 .   newis - index set in global numbering
350 
351     Level: advanced
352 
353     Concepts: mapping^local to global
354 
355 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
356           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
357 @*/
358 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
359 {
360   PetscErrorCode ierr;
361   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
362   const PetscInt *idxin;
363 
364   PetscFunctionBegin;
365   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
366   PetscValidHeaderSpecific(is,IS_CLASSID,2);
367   PetscValidPointer(newis,3);
368 
369   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
370   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
371   idxmap = mapping->indices;
372 
373   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
374   for (i=0; i<n; i++) {
375     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
376     idxout[i] = idxmap[idxin[i]];
377   }
378   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
379   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
380   PetscFunctionReturn(0);
381 }
382 
383 #undef __FUNCT__
384 #define __FUNCT__ "ISLocalToGlobalMappingApply"
385 /*@C
386    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
387    and converts them to the global numbering.
388 
389    Not collective
390 
391    Input Parameters:
392 +  mapping - the local to global mapping context
393 .  N - number of integers
394 -  in - input indices in local numbering
395 
396    Output Parameter:
397 .  out - indices in global numbering
398 
399    Notes:
400    The in and out array parameters may be identical.
401 
402    Level: advanced
403 
404 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
405           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
406           AOPetscToApplication(), ISGlobalToLocalMappingApply()
407 
408     Concepts: mapping^local to global
409 @*/
410 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
411 {
412   PetscInt       i,Nmax = mapping->n;
413   const PetscInt *idx = mapping->indices;
414 
415   PetscFunctionBegin;
416   for (i=0; i<N; i++) {
417     if (in[i] < 0) {
418       out[i] = in[i];
419       continue;
420     }
421     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
422     out[i] = idx[in[i]];
423   }
424   PetscFunctionReturn(0);
425 }
426 
427 /* -----------------------------------------------------------------------------------------*/
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
431 /*
432     Creates the global fields in the ISLocalToGlobalMapping structure
433 */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the range [start,end] of nonnegative global indices referenced locally */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue; /* negative entries are skipped (unmapped points) */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;} /* no nonnegative entries: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* dense inverse table over [start,end]; -1 marks "no local index for this global".
     NOTE(review): allocates end-start+2 entries (one extra) -- presumably to avoid
     a zero-length allocation when the range is empty; confirm before changing */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i; /* if a global appears twice, the later local index wins */
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
463 
464 #undef __FUNCT__
465 #define __FUNCT__ "ISGlobalToLocalMappingApply"
466 /*@
467     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
468     specified with a global numbering.
469 
470     Not collective
471 
472     Input Parameters:
473 +   mapping - mapping between local and global numbering
474 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
475            IS_GTOLM_DROP - drops the indices with no local value from the output list
476 .   n - number of global indices to map
477 -   idx - global indices to map
478 
479     Output Parameters:
480 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
481 -   idxout - local index of each global index, one must pass in an array long enough
482              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
483              idxout == NULL to determine the required length (returned in nout)
484              and then allocate the required space and call ISGlobalToLocalMappingApply()
485              a second time to set the values.
486 
487     Notes:
488     Either nout or idxout may be NULL. idx and idxout may be identical.
489 
490     This is not scalable in memory usage. Each processor requires O(Nglobal) size
491     array to compute these.
492 
493     Level: advanced
494 
495     Developer Note: The manual page states that idx and idxout may be identical but the calling
496        sequence declares idx as const so it cannot be the same as idxout.
497 
498     Concepts: mapping^global to local
499 
500 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
501           ISLocalToGlobalMappingDestroy()
502 @*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK mode: output has the same length as the input; unmappable entries become -1,
       negative inputs are passed through unchanged */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP mode: unmappable entries (negative, out of range, or absent from the
       table) are omitted; nout reports how many survive */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue; /* in range but not owned locally */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass so the caller can size idxout before a second call */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
552 
553 #undef __FUNCT__
554 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
555 /*@C
556     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
557      each index shared by more than one processor
558 
559     Collective on ISLocalToGlobalMapping
560 
561     Input Parameter:
562 .   mapping - the mapping from local to global indexing
563 
564     Output Parameters:
565 +   nproc - number of processors that are connected to this one
566 .   proc - neighboring processors
567 .   numproc - number of indices for each subdomain (processor)
568 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
569 
570     Level: advanced
571 
572     Concepts: mapping^local to global
573 
574     Fortran Usage:
575 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
576 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
577           PetscInt indices[nproc][numprocmax],ierr)
578         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
579         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
580 
581 
582 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
583           ISLocalToGlobalMappingRestoreInfo()
584 @*/
585 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
586 {
587   PetscErrorCode ierr;
588   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
589   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
590   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
591   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
592   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
593   PetscInt       first_procs,first_numprocs,*first_indices;
594   MPI_Request    *recv_waits,*send_waits;
595   MPI_Status     recv_status,*send_status,*recv_statuses;
596   MPI_Comm       comm;
597   PetscBool      debug = PETSC_FALSE;
598 
599   PetscFunctionBegin;
600   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
601   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
602   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
603   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
604   if (size == 1) {
605     *nproc         = 0;
606     *procs         = NULL;
607     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
608     (*numprocs)[0] = 0;
609     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
610     (*indices)[0]  = NULL;
611     PetscFunctionReturn(0);
612   }
613 
614   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
615 
616   /*
617     Notes on ISLocalToGlobalMappingGetInfo
618 
619     globally owned node - the nodes that have been assigned to this processor in global
620            numbering, just for this routine.
621 
622     nontrivial globally owned node - node assigned to this processor that is on a subdomain
623            boundary (i.e. it has more than one local owner)
624 
625     locally owned node - node that exists on this processors subdomain
626 
627     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
628            local subdomain)
629   */
630   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
631   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
632   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
633 
634   for (i=0; i<n; i++) {
635     if (lindices[i] > max) max = lindices[i];
636   }
637   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
638   Ng++;
639   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
640   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
641   scale  = Ng/size + 1;
642   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
643   rstart = scale*rank;
644 
645   /* determine ownership ranges of global indices */
646   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
647   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
648 
649   /* determine owners of each local node  */
650   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
651   for (i=0; i<n; i++) {
652     proc             = lindices[i]/scale; /* processor that globally owns this index */
653     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
654     owner[i]         = proc;
655     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
656   }
657   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
658   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
659 
660   /* inform other processors of number of messages and max length*/
661   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
662   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
663 
664   /* post receives for owned rows */
665   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
666   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
667   for (i=0; i<nrecvs; i++) {
668     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
669   }
670 
671   /* pack messages containing lists of local nodes to owners */
672   ierr      = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
673   ierr      = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
674   starts[0] = 0;
675   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
676   for (i=0; i<n; i++) {
677     sends[starts[owner[i]]++] = lindices[i];
678     sends[starts[owner[i]]++] = i;
679   }
680   ierr = PetscFree(owner);CHKERRQ(ierr);
681   starts[0] = 0;
682   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
683 
684   /* send the messages */
685   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
686   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
687   cnt = 0;
688   for (i=0; i<size; i++) {
689     if (nprocs[2*i]) {
690       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
691       dest[cnt] = i;
692       cnt++;
693     }
694   }
695   ierr = PetscFree(starts);CHKERRQ(ierr);
696 
697   /* wait on receives */
698   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
699   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
700   cnt  = nrecvs;
701   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
702   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
703   while (cnt) {
704     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
705     /* unpack receives into our local space */
706     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
707     source[imdex] = recv_status.MPI_SOURCE;
708     len[imdex]    = len[imdex]/2;
709     /* count how many local owners for each of my global owned indices */
710     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
711     cnt--;
712   }
713   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
714 
715   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
716   nowned  = 0;
717   nownedm = 0;
718   for (i=0; i<ng; i++) {
719     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
720   }
721 
722   /* create single array to contain rank of all local owners of each globally owned index */
723   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
724   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
725   starts[0] = 0;
726   for (i=1; i<ng; i++) {
727     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
728     else starts[i] = starts[i-1];
729   }
730 
731   /* for each nontrivial globally owned node list all arriving processors */
732   for (i=0; i<nrecvs; i++) {
733     for (j=0; j<len[i]; j++) {
734       node = recvs[2*i*nmax+2*j]-rstart;
735       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
736     }
737   }
738 
739   if (debug) { /* -----------------------------------  */
740     starts[0] = 0;
741     for (i=1; i<ng; i++) {
742       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
743       else starts[i] = starts[i-1];
744     }
745     for (i=0; i<ng; i++) {
746       if (nownedsenders[i] > 1) {
747         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
748         for (j=0; j<nownedsenders[i]; j++) {
749           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
750         }
751         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
752       }
753     }
754     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
755   } /* -----------------------------------  */
756 
757   /* wait on original sends */
758   if (nsends) {
759     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
760     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
761     ierr = PetscFree(send_status);CHKERRQ(ierr);
762   }
763   ierr = PetscFree(send_waits);CHKERRQ(ierr);
764   ierr = PetscFree(sends);CHKERRQ(ierr);
765   ierr = PetscFree(nprocs);CHKERRQ(ierr);
766 
767   /* pack messages to send back to local owners */
768   starts[0] = 0;
769   for (i=1; i<ng; i++) {
770     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
771     else starts[i] = starts[i-1];
772   }
773   nsends2 = nrecvs;
774   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
775   for (i=0; i<nrecvs; i++) {
776     nprocs[i] = 1;
777     for (j=0; j<len[i]; j++) {
778       node = recvs[2*i*nmax+2*j]-rstart;
779       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
780     }
781   }
782   nt = 0;
783   for (i=0; i<nsends2; i++) nt += nprocs[i];
784 
785   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
786   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
787 
788   starts2[0] = 0;
789   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
790   /*
791      Each message is 1 + nprocs[i] long, and consists of
792        (0) the number of nodes being sent back
793        (1) the local node number,
794        (2) the number of processors sharing it,
795        (3) the processors sharing it
796   */
797   for (i=0; i<nsends2; i++) {
798     cnt = 1;
799     sends2[starts2[i]] = 0;
800     for (j=0; j<len[i]; j++) {
801       node = recvs[2*i*nmax+2*j]-rstart;
802       if (nownedsenders[node] > 1) {
803         sends2[starts2[i]]++;
804         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
805         sends2[starts2[i]+cnt++] = nownedsenders[node];
806         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
807         cnt += nownedsenders[node];
808       }
809     }
810   }
811 
812   /* receive the message lengths */
813   nrecvs2 = nsends;
814   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
815   ierr    = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
816   ierr    = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
817   for (i=0; i<nrecvs2; i++) {
818     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
819   }
820 
821   /* send the message lengths */
822   for (i=0; i<nsends2; i++) {
823     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
824   }
825 
826   /* wait on receives of lens */
827   if (nrecvs2) {
828     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
829     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
830     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
831   }
832   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
833 
834   starts3[0] = 0;
835   nt         = 0;
836   for (i=0; i<nrecvs2-1; i++) {
837     starts3[i+1] = starts3[i] + lens2[i];
838     nt          += lens2[i];
839   }
840   if (nrecvs2) nt += lens2[nrecvs2-1];
841 
842   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
843   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
844   for (i=0; i<nrecvs2; i++) {
845     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
846   }
847 
848   /* send the messages */
849   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
850   for (i=0; i<nsends2; i++) {
851     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
852   }
853 
854   /* wait on receives */
855   if (nrecvs2) {
856     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
857     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
858     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
859   }
860   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
861   ierr = PetscFree(nprocs);CHKERRQ(ierr);
862 
863   if (debug) { /* -----------------------------------  */
864     cnt = 0;
865     for (i=0; i<nrecvs2; i++) {
866       nt = recvs2[cnt++];
867       for (j=0; j<nt; j++) {
868         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
869         for (k=0; k<recvs2[cnt+1]; k++) {
870           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
871         }
872         cnt += 2 + recvs2[cnt+1];
873         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
874       }
875     }
876     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
877   } /* -----------------------------------  */
878 
879   /* count number subdomains for each local node */
880   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
881   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
882   cnt  = 0;
883   for (i=0; i<nrecvs2; i++) {
884     nt = recvs2[cnt++];
885     for (j=0; j<nt; j++) {
886       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
887       cnt += 2 + recvs2[cnt+1];
888     }
889   }
890   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
891   *nproc    = nt;
892   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
893   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
894   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
895   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
896   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
897   cnt       = 0;
898   for (i=0; i<size; i++) {
899     if (nprocs[i] > 0) {
900       bprocs[i]        = cnt;
901       (*procs)[cnt]    = i;
902       (*numprocs)[cnt] = nprocs[i];
903       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
904       cnt++;
905     }
906   }
907 
908   /* make the list of subdomains for each nontrivial local node */
909   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
910   cnt  = 0;
911   for (i=0; i<nrecvs2; i++) {
912     nt = recvs2[cnt++];
913     for (j=0; j<nt; j++) {
914       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
915       cnt += 2 + recvs2[cnt+1];
916     }
917   }
918   ierr = PetscFree(bprocs);CHKERRQ(ierr);
919   ierr = PetscFree(recvs2);CHKERRQ(ierr);
920 
921   /* sort the node indexing by their global numbers */
922   nt = *nproc;
923   for (i=0; i<nt; i++) {
924     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
925     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
926     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
927     ierr = PetscFree(tmp);CHKERRQ(ierr);
928   }
929 
930   if (debug) { /* -----------------------------------  */
931     nt = *nproc;
932     for (i=0; i<nt; i++) {
933       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
934       for (j=0; j<(*numprocs)[i]; j++) {
935         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
936       }
937       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
938     }
939     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
940   } /* -----------------------------------  */
941 
942   /* wait on sends */
943   if (nsends2) {
944     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
945     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
946     ierr = PetscFree(send_status);CHKERRQ(ierr);
947   }
948 
949   ierr = PetscFree(starts3);CHKERRQ(ierr);
950   ierr = PetscFree(dest);CHKERRQ(ierr);
951   ierr = PetscFree(send_waits);CHKERRQ(ierr);
952 
953   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
954   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
955   ierr = PetscFree(starts);CHKERRQ(ierr);
956   ierr = PetscFree(starts2);CHKERRQ(ierr);
957   ierr = PetscFree(lens2);CHKERRQ(ierr);
958 
959   ierr = PetscFree(source);CHKERRQ(ierr);
960   ierr = PetscFree(len);CHKERRQ(ierr);
961   ierr = PetscFree(recvs);CHKERRQ(ierr);
962   ierr = PetscFree(nprocs);CHKERRQ(ierr);
963   ierr = PetscFree(sends2);CHKERRQ(ierr);
964 
965   /* put the information about myself as the first entry in the list */
966   first_procs    = (*procs)[0];
967   first_numprocs = (*numprocs)[0];
968   first_indices  = (*indices)[0];
969   for (i=0; i<*nproc; i++) {
970     if ((*procs)[i] == rank) {
971       (*procs)[0]    = (*procs)[i];
972       (*numprocs)[0] = (*numprocs)[i];
973       (*indices)[0]  = (*indices)[i];
974       (*procs)[i]    = first_procs;
975       (*numprocs)[i] = first_numprocs;
976       (*indices)[i]  = first_indices;
977       break;
978     }
979   }
980   PetscFunctionReturn(0);
981 }
982 
983 #undef __FUNCT__
984 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
985 /*@C
986     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
987 
988     Collective on ISLocalToGlobalMapping
989 
990     Input Parameters:
991 .   mapping - the mapping from local to global indexing
992 
993     Output Parameter:
994 +   nproc - number of processors that are connected to this one
995 .   proc - neighboring processors
996 .   numproc - number of indices for each processor
997 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
998 
999     Level: advanced
1000 
1001 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1002           ISLocalToGlobalMappingGetInfo()
1003 @*/
1004 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1005 {
1006   PetscErrorCode ierr;
1007   PetscInt       i;
1008 
1009   PetscFunctionBegin;
1010   ierr = PetscFree(*procs);CHKERRQ(ierr);
1011   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1012   if (*indices) {
1013     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1014     for (i=1; i<*nproc; i++) {
1015       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1016     }
1017     ierr = PetscFree(*indices);CHKERRQ(ierr);
1018   }
1019   PetscFunctionReturn(0);
1020 }
1021 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
/*@C
   ISLocalToGlobalMappingGetIndices - Get global indices for every local point

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices

   Level: advanced

   Notes: the returned array is a borrowed reference to the mapping's internal
   storage; do not free or modify it, and return it with
   ISLocalToGlobalMappingRestoreIndices() when done

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out a direct pointer into the mapping; no copy is made */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1047 
1048 #undef __FUNCT__
1049 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1050 /*@C
1051    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1052 
1053    Not Collective
1054 
1055    Input Arguments:
1056 + ltog - local to global mapping
1057 - array - array of indices
1058 
1059    Level: advanced
1060 
1061 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1062 @*/
1063 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1064 {
1065   PetscFunctionBegin;
1066   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1067   PetscValidPointer(array,2);
1068   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1069   *array = NULL;
1070   PetscFunctionReturn(0);
1071 }
1072 
1073 #undef __FUNCT__
1074 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1075 /*@C
1076    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1077 
1078    Not Collective
1079 
1080    Input Arguments:
1081 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1082 . n - number of mappings to concatenate
1083 - ltogs - local to global mappings
1084 
1085    Output Arguments:
1086 . ltogcat - new mapping
1087 
1088    Level: advanced
1089 
1090 .seealso: ISLocalToGlobalMappingCreate()
1091 @*/
1092 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1093 {
1094   PetscInt       i,cnt,m,*idx;
1095   PetscErrorCode ierr;
1096 
1097   PetscFunctionBegin;
1098   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1099   if (n > 0) PetscValidPointer(ltogs,3);
1100   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1101   PetscValidPointer(ltogcat,4);
1102   for (cnt=0,i=0; i<n; i++) {
1103     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1104     cnt += m;
1105   }
1106   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1107   for (cnt=0,i=0; i<n; i++) {
1108     const PetscInt *subidx;
1109     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1110     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1111     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1112     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1113     cnt += m;
1114   }
1115   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1116   PetscFunctionReturn(0);
1117 }
1118