xref: /petsc/src/vec/is/utils/isltog.c (revision 07475bc16356fc37e8c66fcce1957fb7d8feef24)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
4 PetscClassId  IS_LTOGM_CLASSID;
5 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   mapping - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* n was cached at creation time (ISLocalToGlobalMappingCreate); no communication needed */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
33 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   mapping - local to global mapping
-   viewer - viewer; if PETSC_NULL the default ASCII viewer for the mapping's communicator is used

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt        i;
  PetscMPIInt     rank;
  PetscBool       iascii;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to stdout on the mapping's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* print one "[rank] local global" line per local entry, in rank order */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
87 .   is - index set containing the global numbers for each local number
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   PetscInt       n;
102   const PetscInt *indices;
103   MPI_Comm       comm;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(is,IS_CLASSID,1);
107   PetscValidPointer(mapping,2);
108 
109   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
110   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
111   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
112   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
113   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Notes: local indices that are not leaves of the star forest are mapped to -1.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,PETSC_NULL);CHKERRQ(ierr);
  /* ilocal == NULL means the leaves are the contiguous range [0,nleaves);
     otherwise the local space extends to the largest leaf index + 1 */
  if (ilocal) {for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);}
  else maxlocal = nleaves;
  ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
  /* roots own the contiguous global range [start,start+nroots) on this process */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* -1 marks local slots with no corresponding leaf; the broadcast overwrites real leaves */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership transfers to the mapping (PETSC_OWN_POINTER), so it is not freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
163 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element, these do not need to be in increasing order (sorted)
-   mode - see PetscCopyMode; PETSC_USE_POINTER is not currently supported

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may legitimately be NULL when n == 0 */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n       = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) {
    /* adopt the caller's array; it will be freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
222 /*@
223     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
224        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
225        and VecSetLocalToGlobalMappingBlock().
226 
227     Not Collective, but communicator may have more than one process
228 
229     Input Parameters:
230 +    inmap - original point-wise mapping
231 -    bs - block size
232 
233     Output Parameter:
234 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
235 
236     Level: advanced
237 
238     Concepts: mapping^local to global
239 
240 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
241 @*/
242 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
243 {
244   PetscErrorCode ierr;
245   PetscInt       *ii,i,n;
246 
247   PetscFunctionBegin;
248   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
249   PetscValidPointer(outmap,3);
250   if (bs > 1) {
251     n    = inmap->n/bs;
252     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
253     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
254     for (i=0; i<n; i++) {
255       ii[i] = inmap->indices[bs*i]/bs;
256     }
257     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
258   } else {
259     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
260     *outmap = inmap;
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
267 /*@
268     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
269        ISLocalToGlobalMapping
270 
271     Not Collective, but communicator may have more than one process
272 
273     Input Parameter:
274 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
275 - bs - block size
276 
277     Output Parameter:
278 .   outmap - pointwise mapping
279 
280     Level: advanced
281 
282     Concepts: mapping^local to global
283 
284 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
285 @*/
286 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
287 {
288   PetscErrorCode ierr;
289   PetscInt       *ii,i,n;
290 
291   PetscFunctionBegin;
292   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
293   PetscValidPointer(outmap,2);
294   if (bs > 1) {
295     n    = inmap->n*bs;
296     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
297     for (i=0; i<n; i++) {
298       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
299     }
300     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
301   } else {
302     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
303     *outmap = inmap;
304   }
305   PetscFunctionReturn(0);
306 }
307 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* if other references remain, just drop ours and null the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
336 
337 #undef __FUNCT__
338 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
339 /*@
340     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
341     a new index set using the global numbering defined in an ISLocalToGlobalMapping
342     context.
343 
344     Not collective
345 
346     Input Parameters:
347 +   mapping - mapping between local and global numbering
348 -   is - index set in local numbering
349 
350     Output Parameters:
351 .   newis - index set in global numbering
352 
353     Level: advanced
354 
355     Concepts: mapping^local to global
356 
357 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
358           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
359 @*/
360 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
361 {
362   PetscErrorCode ierr;
363   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
364   const PetscInt *idxin;
365 
366   PetscFunctionBegin;
367   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
368   PetscValidHeaderSpecific(is,IS_CLASSID,2);
369   PetscValidPointer(newis,3);
370 
371   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
372   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
373   idxmap = mapping->indices;
374 
375   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
376   for (i=0; i<n; i++) {
377     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
378     idxout[i] = idxmap[idxin[i]];
379   }
380   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
381   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
382   PetscFunctionReturn(0);
383 }
384 
385 /*MC
386    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
387    and converts them to the global numbering.
388 
389    Synopsis:
390    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
391 
392    Not collective
393 
394    Input Parameters:
395 +  mapping - the local to global mapping context
396 .  N - number of integers
397 -  in - input indices in local numbering
398 
399    Output Parameter:
400 .  out - indices in global numbering
401 
402    Notes:
403    The in and out array parameters may be identical.
404 
405    Level: advanced
406 
407 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
408           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
409           AOPetscToApplication(), ISGlobalToLocalMappingApply()
410 
411     Concepts: mapping^local to global
412 
413 M*/
414 
415 /* -----------------------------------------------------------------------------------------*/
416 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure: the
    range [globalstart,globalend] covered by the mapping's (nonnegative)
    indices and a dense inverse table "globals" where
    globals[g - globalstart] is the local index of global index g, or -1
    if g is not in the local-to-global mapping. Negative entries of
    mapping->indices are skipped throughout.

    Memory use is O(globalend - globalstart), i.e. proportional to the
    spread of the global indices, not to their count.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find min/max of the nonnegative indices */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative index at all: force an empty range */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* NOTE(review): allocates end-start+2 entries but only end-start+1 are
     initialized/logged below; the extra slot appears to be padding so the
     empty-range case allocates at least one entry -- confirm */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  /* -1 marks global indices with no local counterpart */
  for (i=0; i<end-start+1; i++) {
    globals[i] = -1;
  }
  /* invert the mapping; if a global index occurs more than once the last
     local index wins */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
453 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == PETSC_NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the inverse (global-to-local) table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as the input; negative inputs pass
       through unchanged, unmapped global indices become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: only successfully mapped indices are emitted (or counted when
       idxout is PETSC_NULL, supporting the two-call sizing pattern) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }

  PetscFunctionReturn(0);
}
543 
544 #undef __FUNCT__
545 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
546 /*@C
547     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
548      each index shared by more than one processor
549 
550     Collective on ISLocalToGlobalMapping
551 
552     Input Parameters:
553 .   mapping - the mapping from local to global indexing
554 
555     Output Parameter:
556 +   nproc - number of processors that are connected to this one
557 .   proc - neighboring processors
558 .   numproc - number of indices for each subdomain (processor)
559 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
560 
561     Level: advanced
562 
563     Concepts: mapping^local to global
564 
565     Fortran Usage:
566 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
567 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
568           PetscInt indices[nproc][numprocmax],ierr)
569         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
570         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
571 
572 
573 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
574           ISLocalToGlobalMappingRestoreInfo()
575 @*/
576 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
577 {
578   PetscErrorCode ierr;
579   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
580   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
581   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
582   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
583   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
584   PetscInt       first_procs,first_numprocs,*first_indices;
585   MPI_Request    *recv_waits,*send_waits;
586   MPI_Status     recv_status,*send_status,*recv_statuses;
587   MPI_Comm       comm = ((PetscObject)mapping)->comm;
588   PetscBool      debug = PETSC_FALSE;
589 
590   PetscFunctionBegin;
591   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
592   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
593   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
594   if (size == 1) {
595     *nproc         = 0;
596     *procs         = PETSC_NULL;
597     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
598     (*numprocs)[0] = 0;
599     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
600     (*indices)[0]  = PETSC_NULL;
601     PetscFunctionReturn(0);
602   }
603 
604   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
605 
606   /*
607     Notes on ISLocalToGlobalMappingGetInfo
608 
609     globally owned node - the nodes that have been assigned to this processor in global
610            numbering, just for this routine.
611 
612     nontrivial globally owned node - node assigned to this processor that is on a subdomain
613            boundary (i.e. is has more than one local owner)
614 
615     locally owned node - node that exists on this processors subdomain
616 
617     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
618            local subdomain
619   */
620   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
621   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
622   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
623 
624   for (i=0; i<n; i++) {
625     if (lindices[i] > max) max = lindices[i];
626   }
627   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
628   Ng++;
629   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
630   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
631   scale  = Ng/size + 1;
632   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
633   rstart = scale*rank;
634 
635   /* determine ownership ranges of global indices */
636   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
637   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
638 
639   /* determine owners of each local node  */
640   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
641   for (i=0; i<n; i++) {
642     proc             = lindices[i]/scale; /* processor that globally owns this index */
643     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
644     owner[i]         = proc;
645     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
646   }
647   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
648   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
649 
650   /* inform other processors of number of messages and max length*/
651   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
652   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
653 
654   /* post receives for owned rows */
655   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
656   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
657   for (i=0; i<nrecvs; i++) {
658     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
659   }
660 
661   /* pack messages containing lists of local nodes to owners */
662   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
663   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
664   starts[0]  = 0;
665   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
666   for (i=0; i<n; i++) {
667     sends[starts[owner[i]]++] = lindices[i];
668     sends[starts[owner[i]]++] = i;
669   }
670   ierr = PetscFree(owner);CHKERRQ(ierr);
671   starts[0]  = 0;
672   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
673 
674   /* send the messages */
675   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
676   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
677   cnt = 0;
678   for (i=0; i<size; i++) {
679     if (nprocs[2*i]) {
680       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
681       dest[cnt] = i;
682       cnt++;
683     }
684   }
685   ierr = PetscFree(starts);CHKERRQ(ierr);
686 
687   /* wait on receives */
688   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
689   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
690   cnt  = nrecvs;
691   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
692   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
693   while (cnt) {
694     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
695     /* unpack receives into our local space */
696     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
697     source[imdex]  = recv_status.MPI_SOURCE;
698     len[imdex]     = len[imdex]/2;
699     /* count how many local owners for each of my global owned indices */
700     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
701     cnt--;
702   }
703   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
704 
705   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
706   nowned  = 0;
707   nownedm = 0;
708   for (i=0; i<ng; i++) {
709     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
710   }
711 
712   /* create single array to contain rank of all local owners of each globally owned index */
713   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
714   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
715   starts[0] = 0;
716   for (i=1; i<ng; i++) {
717     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
718     else starts[i] = starts[i-1];
719   }
720 
721   /* for each nontrival globally owned node list all arriving processors */
722   for (i=0; i<nrecvs; i++) {
723     for (j=0; j<len[i]; j++) {
724       node = recvs[2*i*nmax+2*j]-rstart;
725       if (nownedsenders[node] > 1) {
726         ownedsenders[starts[node]++] = source[i];
727       }
728     }
729   }
730 
731   if (debug) { /* -----------------------------------  */
732     starts[0]    = 0;
733     for (i=1; i<ng; i++) {
734       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
735       else starts[i] = starts[i-1];
736     }
737     for (i=0; i<ng; i++) {
738       if (nownedsenders[i] > 1) {
739         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
740         for (j=0; j<nownedsenders[i]; j++) {
741           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
742         }
743         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
744       }
745     }
746     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
747   }/* -----------------------------------  */
748 
749   /* wait on original sends */
750   if (nsends) {
751     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
752     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
753     ierr = PetscFree(send_status);CHKERRQ(ierr);
754   }
755   ierr = PetscFree(send_waits);CHKERRQ(ierr);
756   ierr = PetscFree(sends);CHKERRQ(ierr);
757   ierr = PetscFree(nprocs);CHKERRQ(ierr);
758 
759   /* pack messages to send back to local owners */
760   starts[0]    = 0;
761   for (i=1; i<ng; i++) {
762     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
763     else starts[i] = starts[i-1];
764   }
765   nsends2 = nrecvs;
766   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
767   for (i=0; i<nrecvs; i++) {
768     nprocs[i] = 1;
769     for (j=0; j<len[i]; j++) {
770       node = recvs[2*i*nmax+2*j]-rstart;
771       if (nownedsenders[node] > 1) {
772         nprocs[i] += 2 + nownedsenders[node];
773       }
774     }
775   }
776   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
777   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
778   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
779   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
780   /*
781      Each message is 1 + nprocs[i] long, and consists of
782        (0) the number of nodes being sent back
783        (1) the local node number,
784        (2) the number of processors sharing it,
785        (3) the processors sharing it
786   */
787   for (i=0; i<nsends2; i++) {
788     cnt = 1;
789     sends2[starts2[i]] = 0;
790     for (j=0; j<len[i]; j++) {
791       node = recvs[2*i*nmax+2*j]-rstart;
792       if (nownedsenders[node] > 1) {
793         sends2[starts2[i]]++;
794         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
795         sends2[starts2[i]+cnt++] = nownedsenders[node];
796         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
797         cnt += nownedsenders[node];
798       }
799     }
800   }
801 
802   /* receive the message lengths */
803   nrecvs2 = nsends;
804   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
805   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
806   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
807   for (i=0; i<nrecvs2; i++) {
808     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
809   }
810 
811   /* send the message lengths */
812   for (i=0; i<nsends2; i++) {
813     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
814   }
815 
816   /* wait on receives of lens */
817   if (nrecvs2) {
818     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
819     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
820     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
821   }
822   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
823 
824   starts3[0] = 0;
825   nt         = 0;
826   for (i=0; i<nrecvs2-1; i++) {
827     starts3[i+1] = starts3[i] + lens2[i];
828     nt          += lens2[i];
829   }
830   if (nrecvs2) nt += lens2[nrecvs2-1];
831 
832   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
833   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
834   for (i=0; i<nrecvs2; i++) {
835     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
836   }
837 
838   /* send the messages */
839   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
840   for (i=0; i<nsends2; i++) {
841     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
842   }
843 
844   /* wait on receives */
845   if (nrecvs2) {
846     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
847     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
848     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
849   }
850   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
851   ierr = PetscFree(nprocs);CHKERRQ(ierr);
852 
853   if (debug) { /* -----------------------------------  */
854     cnt = 0;
855     for (i=0; i<nrecvs2; i++) {
856       nt = recvs2[cnt++];
857       for (j=0; j<nt; j++) {
858         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
859         for (k=0; k<recvs2[cnt+1]; k++) {
860           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
861         }
862         cnt += 2 + recvs2[cnt+1];
863         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
864       }
865     }
866     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
867   } /* -----------------------------------  */
868 
869   /* count number subdomains for each local node */
870   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
871   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
872   cnt  = 0;
873   for (i=0; i<nrecvs2; i++) {
874     nt = recvs2[cnt++];
875     for (j=0; j<nt; j++) {
876       for (k=0; k<recvs2[cnt+1]; k++) {
877         nprocs[recvs2[cnt+2+k]]++;
878       }
879       cnt += 2 + recvs2[cnt+1];
880     }
881   }
882   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
883   *nproc    = nt;
884   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
885   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
886   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
887   for (i=0;i<nt+1;i++) { (*indices)[i]=PETSC_NULL; }
888   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
889   cnt       = 0;
890   for (i=0; i<size; i++) {
891     if (nprocs[i] > 0) {
892       bprocs[i]        = cnt;
893       (*procs)[cnt]    = i;
894       (*numprocs)[cnt] = nprocs[i];
895       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
896       cnt++;
897     }
898   }
899 
900   /* make the list of subdomains for each nontrivial local node */
901   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
902   cnt  = 0;
903   for (i=0; i<nrecvs2; i++) {
904     nt = recvs2[cnt++];
905     for (j=0; j<nt; j++) {
906       for (k=0; k<recvs2[cnt+1]; k++) {
907         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
908       }
909       cnt += 2 + recvs2[cnt+1];
910     }
911   }
912   ierr = PetscFree(bprocs);CHKERRQ(ierr);
913   ierr = PetscFree(recvs2);CHKERRQ(ierr);
914 
915   /* sort the node indexing by their global numbers */
916   nt = *nproc;
917   for (i=0; i<nt; i++) {
918     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
919     for (j=0; j<(*numprocs)[i]; j++) {
920       tmp[j] = lindices[(*indices)[i][j]];
921     }
922     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
923     ierr = PetscFree(tmp);CHKERRQ(ierr);
924   }
925 
926   if (debug) { /* -----------------------------------  */
927     nt = *nproc;
928     for (i=0; i<nt; i++) {
929       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
930       for (j=0; j<(*numprocs)[i]; j++) {
931         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
932       }
933       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
934     }
935     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
936   } /* -----------------------------------  */
937 
938   /* wait on sends */
939   if (nsends2) {
940     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
941     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
942     ierr = PetscFree(send_status);CHKERRQ(ierr);
943   }
944 
945   ierr = PetscFree(starts3);CHKERRQ(ierr);
946   ierr = PetscFree(dest);CHKERRQ(ierr);
947   ierr = PetscFree(send_waits);CHKERRQ(ierr);
948 
949   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
950   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
951   ierr = PetscFree(starts);CHKERRQ(ierr);
952   ierr = PetscFree(starts2);CHKERRQ(ierr);
953   ierr = PetscFree(lens2);CHKERRQ(ierr);
954 
955   ierr = PetscFree(source);CHKERRQ(ierr);
956   ierr = PetscFree(len);CHKERRQ(ierr);
957   ierr = PetscFree(recvs);CHKERRQ(ierr);
958   ierr = PetscFree(nprocs);CHKERRQ(ierr);
959   ierr = PetscFree(sends2);CHKERRQ(ierr);
960 
961   /* put the information about myself as the first entry in the list */
962   first_procs    = (*procs)[0];
963   first_numprocs = (*numprocs)[0];
964   first_indices  = (*indices)[0];
965   for (i=0; i<*nproc; i++) {
966     if ((*procs)[i] == rank) {
967       (*procs)[0]    = (*procs)[i];
968       (*numprocs)[0] = (*numprocs)[i];
969       (*indices)[0]  = (*indices)[i];
970       (*procs)[i]    = first_procs;
971       (*numprocs)[i] = first_numprocs;
972       (*indices)[i]  = first_indices;
973       break;
974     }
975   }
976   PetscFunctionReturn(0);
977 }
978 
979 #undef __FUNCT__
980 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
981 /*@C
982     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
983 
984     Collective on ISLocalToGlobalMapping
985 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
994 
995     Level: advanced
996 
997 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
998           ISLocalToGlobalMappingGetInfo()
999 @*/
1000 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1001 {
1002   PetscErrorCode ierr;
1003   PetscInt       i;
1004 
1005   PetscFunctionBegin;
1006   ierr = PetscFree(*procs);CHKERRQ(ierr);
1007   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1008   if (*indices) {
1009     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1010     for (i=1; i<*nproc; i++) {
1011       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1012     }
1013     ierr = PetscFree(*indices);CHKERRQ(ierr);
1014   }
1015   PetscFunctionReturn(0);
1016 }
1017 
1018 #undef __FUNCT__
1019 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1020 /*@C
1021    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1022 
1023    Not Collective
1024 
1025    Input Arguments:
1026 . ltog - local to global mapping
1027 
1028    Output Arguments:
1029 . array - array of indices
1030 
1031    Level: advanced
1032 
1033 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1034 @*/
1035 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1036 {
1037 
1038   PetscFunctionBegin;
1039   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1040   PetscValidPointer(array,2);
1041   *array = ltog->indices;
1042   PetscFunctionReturn(0);
1043 }
1044 
1045 #undef __FUNCT__
1046 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1047 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1049 
1050    Not Collective
1051 
1052    Input Arguments:
1053 + ltog - local to global mapping
1054 - array - array of indices
1055 
1056    Level: advanced
1057 
1058 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1059 @*/
1060 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1061 {
1062 
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1065   PetscValidPointer(array,2);
1066   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1067   *array = PETSC_NULL;
1068   PetscFunctionReturn(0);
1069 }
1070 
1071 #undef __FUNCT__
1072 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1073 /*@C
1074    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1075 
1076    Not Collective
1077 
1078    Input Arguments:
1079 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1080 . n - number of mappings to concatenate
1081 - ltogs - local to global mappings
1082 
1083    Output Arguments:
1084 . ltogcat - new mapping
1085 
1086    Level: advanced
1087 
1088 .seealso: ISLocalToGlobalMappingCreate()
1089 @*/
1090 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1091 {
1092   PetscInt       i,cnt,m,*idx;
1093   PetscErrorCode ierr;
1094 
1095   PetscFunctionBegin;
1096   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1097   if (n > 0) PetscValidPointer(ltogs,3);
1098   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1099   PetscValidPointer(ltogcat,4);
1100   for (cnt=0,i=0; i<n; i++) {
1101     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1102     cnt += m;
1103   }
1104   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1105   for (cnt=0,i=0; i<n; i++) {
1106     const PetscInt *subidx;
1107     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1108     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1109     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1110     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1111     cnt += m;
1112   }
1113   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1114   PetscFunctionReturn(0);
1115 }
1116