xref: /petsc/src/vec/is/utils/isltog.c (revision 519f805a543c2a7f195bcba21173bb2cdfadb956)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
4 PetscClassId  IS_LTOGM_CLASSID;
5 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size (number of local entries)
    of a local to global mapping.

    Not Collective

    Input Parameter:
.   mapping - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;
  PetscFunctionReturn(0);
}
33 
34 #undef __FUNCT__
35 #define __FUNCT__ "ISLocalToGlobalMappingView"
36 /*@C
37     ISLocalToGlobalMappingView - View a local to global mapping
38 
39     Not Collective
40 
41     Input Parameters:
42 +   ltog - local to global mapping
43 -   viewer - viewer
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
52 {
53   PetscInt        i;
54   PetscMPIInt     rank;
55   PetscBool       iascii;
56   PetscErrorCode  ierr;
57 
58   PetscFunctionBegin;
59   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
60   if (!viewer) {
61     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
62   }
63   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
64 
65   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
66   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
67   if (iascii) {
68     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
69     for (i=0; i<mapping->n; i++) {
70       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
71     }
72     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
73     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
74   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
87 .   is - index set containing the global numbers for each local number
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   PetscInt       n;
102   const PetscInt *indices;
103   MPI_Comm       comm;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(is,IS_CLASSID,1);
107   PetscValidPointer(mapping,2);
108 
109   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
110   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
111   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
112   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
113   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
119 /*@C
120     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
121     ordering and a global parallel ordering.
122 
123     Collective
124 
125     Input Parameter:
126 +   sf - star forest mapping contiguous local indices to (rank, offset)
127 -   start - first global index on this process
128 
129     Output Parameter:
130 .   mapping - new mapping data structure
131 
132     Level: advanced
133 
134     Concepts: mapping^local to global
135 
136 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
137 @*/
138 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
139 {
140   PetscErrorCode ierr;
141   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
142   const PetscInt *ilocal;
143   MPI_Comm       comm;
144 
145   PetscFunctionBegin;
146   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
147   PetscValidPointer(mapping,3);
148 
149   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
150   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,PETSC_NULL);CHKERRQ(ierr);
151   if (ilocal) {for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);}
152   else maxlocal = nleaves;
153   ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
154   ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
155   for (i=0; i<nroots; i++) globals[i] = start + i;
156   for (i=0; i<maxlocal; i++) ltog[i] = -1;
157   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
158   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
159   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
160   ierr = PetscFree(globals);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   cm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element, these do not need to be in increasing order (sorted); may be 0 if n is 0
-   mode - see PetscCopyMode; PETSC_USE_POINTER is not currently supported

    Output Parameter:
.   mapping - new mapping data structure

    Notes:
    With PETSC_COPY_VALUES a private copy of indices is made; with
    PETSC_OWN_POINTER the mapping takes ownership of indices and frees it in
    ISLocalToGlobalMappingDestroy().

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices is only dereferenced when n > 0 */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = PETSC_NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n       = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) {
    /* caller's array is adopted; it is freed when the mapping is destroyed */
    (*mapping)->indices = (PetscInt*)indices;
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
222 /*@
223     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
224        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
225        and VecSetLocalToGlobalMappingBlock().
226 
227     Not Collective, but communicator may have more than one process
228 
229     Input Parameters:
230 +    inmap - original point-wise mapping
231 -    bs - block size
232 
233     Output Parameter:
234 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
235 
236     Level: advanced
237 
238     Concepts: mapping^local to global
239 
240 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
241 @*/
242 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
243 {
244   PetscErrorCode ierr;
245   PetscInt       *ii,i,n;
246 
247   PetscFunctionBegin;
248   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
249   PetscValidPointer(outmap,3);
250   if (bs > 1) {
251     n    = inmap->n/bs;
252     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
253     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
254     for (i=0; i<n; i++) {
255       ii[i] = inmap->indices[bs*i]/bs;
256     }
257     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
258   } else {
259     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
260     *outmap = inmap;
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
267 /*@
268     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
269        ISLocalToGlobalMapping
270 
271     Not Collective, but communicator may have more than one process
272 
273     Input Parameter:
274 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
275 - bs - block size
276 
277     Output Parameter:
278 .   outmap - pointwise mapping
279 
280     Level: advanced
281 
282     Concepts: mapping^local to global
283 
284 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
285 @*/
286 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
287 {
288   PetscErrorCode ierr;
289   PetscInt       *ii,i,n;
290 
291   PetscFunctionBegin;
292   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
293   PetscValidPointer(outmap,2);
294   if (bs > 1) {
295     n    = inmap->n*bs;
296     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
297     for (i=0; i<n; i++) {
298       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
299     }
300     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
301   } else {
302     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
303     *outmap = inmap;
304   }
305   PetscFunctionReturn(0);
306 }
307 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure; set to 0 on return. Passing a pointer
             to a 0 mapping is a no-op.

   Notes:
   If other references to the mapping exist only the reference count is
   decreased; the storage is freed when the count reaches zero.

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* other references remain: just drop ours and zero the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
336 
337 #undef __FUNCT__
338 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
339 /*@
340     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
341     a new index set using the global numbering defined in an ISLocalToGlobalMapping
342     context.
343 
344     Not collective
345 
346     Input Parameters:
347 +   mapping - mapping between local and global numbering
348 -   is - index set in local numbering
349 
350     Output Parameters:
351 .   newis - index set in global numbering
352 
353     Level: advanced
354 
355     Concepts: mapping^local to global
356 
357 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
358           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
359 @*/
360 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
361 {
362   PetscErrorCode ierr;
363   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
364   const PetscInt *idxin;
365 
366   PetscFunctionBegin;
367   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
368   PetscValidHeaderSpecific(is,IS_CLASSID,2);
369   PetscValidPointer(newis,3);
370 
371   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
372   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
373   idxmap = mapping->indices;
374 
375   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
376   for (i=0; i<n; i++) {
377     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
378     idxout[i] = idxmap[idxin[i]];
379   }
380   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
381   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
382   PetscFunctionReturn(0);
383 }
384 
385 /*MC
386    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
387    and converts them to the global numbering.
388 
389    Synopsis:
390    #include "petscis.h"
   PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,PetscInt in[],PetscInt out[])
392 
393    Not collective
394 
395    Input Parameters:
396 +  mapping - the local to global mapping context
397 .  N - number of integers
398 -  in - input indices in local numbering
399 
400    Output Parameter:
401 .  out - indices in global numbering
402 
403    Notes:
404    The in and out array parameters may be identical.
405 
406    Level: advanced
407 
408 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
409           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
410           AOPetscToApplication(), ISGlobalToLocalMappingApply()
411 
412     Concepts: mapping^local to global
413 
414 M*/
415 
416 /* -----------------------------------------------------------------------------------------*/
417 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a dense inverse lookup table mapping->globals[] covering the range
    [globalstart,globalend] of nonnegative global indices, where
    globals[g - globalstart] is the local index mapping to g, or -1 if no
    local index maps to g.  Negative entries of mapping->indices are skipped.
    If several local indices map to the same global index, the last one wins.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the smallest (start) and largest (end) nonnegative global index */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: use the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* end-start+1 entries are used; the extra +1 keeps the allocation nonzero
     for the empty range */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) {
    globals[i] = -1;   /* -1 marks "no local index maps to this global index" */
  }
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
454 
455 #undef __FUNCT__
456 #define __FUNCT__ "ISGlobalToLocalMappingApply"
457 /*@
458     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
459     specified with a global numbering.
460 
461     Not collective
462 
463     Input Parameters:
464 +   mapping - mapping between local and global numbering
465 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
466            IS_GTOLM_DROP - drops the indices with no local value from the output list
467 .   n - number of global indices to map
468 -   idx - global indices to map
469 
470     Output Parameters:
471 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
472 -   idxout - local index of each global index, one must pass in an array long enough
473              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
474              idxout == PETSC_NULL to determine the required length (returned in nout)
475              and then allocate the required space and call ISGlobalToLocalMappingApply()
476              a second time to set the values.
477 
478     Notes:
479     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
480 
481     This is not scalable in memory usage. Each processor requires O(Nglobal) size
482     array to compute these.
483 
484     Level: advanced
485 
486     Developer Note: The manual page states that idx and idxout may be identical but the calling
487        sequence declares idx as const so it cannot be the same as idxout.
488 
489     Concepts: mapping^global to local
490 
491 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
492           ISLocalToGlobalMappingDestroy()
493 @*/
494 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
495                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
496 {
497   PetscInt       i,*globals,nf = 0,tmp,start,end;
498   PetscErrorCode ierr;
499 
500   PetscFunctionBegin;
501   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
502   if (!mapping->globals) {
503     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
504   }
505   globals = mapping->globals;
506   start   = mapping->globalstart;
507   end     = mapping->globalend;
508 
509   if (type == IS_GTOLM_MASK) {
510     if (idxout) {
511       for (i=0; i<n; i++) {
512         if (idx[i] < 0) idxout[i] = idx[i];
513         else if (idx[i] < start) idxout[i] = -1;
514         else if (idx[i] > end)   idxout[i] = -1;
515         else                     idxout[i] = globals[idx[i] - start];
516       }
517     }
518     if (nout) *nout = n;
519   } else {
520     if (idxout) {
521       for (i=0; i<n; i++) {
522         if (idx[i] < 0) continue;
523         if (idx[i] < start) continue;
524         if (idx[i] > end) continue;
525         tmp = globals[idx[i] - start];
526         if (tmp < 0) continue;
527         idxout[nf++] = tmp;
528       }
529     } else {
530       for (i=0; i<n; i++) {
531         if (idx[i] < 0) continue;
532         if (idx[i] < start) continue;
533         if (idx[i] > end) continue;
534         tmp = globals[idx[i] - start];
535         if (tmp < 0) continue;
536         nf++;
537       }
538     }
539     if (nout) *nout = nf;
540   }
541 
542   PetscFunctionReturn(0);
543 }
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
547 /*@C
548     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
549      each index shared by more than one processor
550 
551     Collective on ISLocalToGlobalMapping
552 
553     Input Parameters:
554 .   mapping - the mapping from local to global indexing
555 
556     Output Parameter:
557 +   nproc - number of processors that are connected to this one
558 .   proc - neighboring processors
559 .   numproc - number of indices for each subdomain (processor)
560 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
561 
562     Level: advanced
563 
564     Concepts: mapping^local to global
565 
566     Fortran Usage:
567 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
568 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
569           PetscInt indices[nproc][numprocmax],ierr)
570         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
571         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
572 
573 
574 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
575           ISLocalToGlobalMappingRestoreInfo()
576 @*/
577 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
578 {
579   PetscErrorCode ierr;
580   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
581   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
582   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
583   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
584   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
585   PetscInt       first_procs,first_numprocs,*first_indices;
586   MPI_Request    *recv_waits,*send_waits;
587   MPI_Status     recv_status,*send_status,*recv_statuses;
588   MPI_Comm       comm = ((PetscObject)mapping)->comm;
589   PetscBool      debug = PETSC_FALSE;
590 
591   PetscFunctionBegin;
592   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
593   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
594   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
595   if (size == 1) {
596     *nproc         = 0;
597     *procs         = PETSC_NULL;
598     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
599     (*numprocs)[0] = 0;
600     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
601     (*indices)[0]  = PETSC_NULL;
602     PetscFunctionReturn(0);
603   }
604 
605   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
606 
607   /*
608     Notes on ISLocalToGlobalMappingGetInfo
609 
610     globally owned node - the nodes that have been assigned to this processor in global
611            numbering, just for this routine.
612 
613     nontrivial globally owned node - node assigned to this processor that is on a subdomain
614            boundary (i.e. is has more than one local owner)
615 
616     locally owned node - node that exists on this processors subdomain
617 
618     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
619            local subdomain
620   */
621   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
622   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
623   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
624 
625   for (i=0; i<n; i++) {
626     if (lindices[i] > max) max = lindices[i];
627   }
628   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
629   Ng++;
630   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
631   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
632   scale  = Ng/size + 1;
633   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
634   rstart = scale*rank;
635 
636   /* determine ownership ranges of global indices */
637   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
638   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
639 
640   /* determine owners of each local node  */
641   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
642   for (i=0; i<n; i++) {
643     proc             = lindices[i]/scale; /* processor that globally owns this index */
644     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
645     owner[i]         = proc;
646     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
647   }
648   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
649   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
650 
651   /* inform other processors of number of messages and max length*/
652   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
653   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
654 
655   /* post receives for owned rows */
656   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
657   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
658   for (i=0; i<nrecvs; i++) {
659     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
660   }
661 
662   /* pack messages containing lists of local nodes to owners */
663   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
664   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
665   starts[0]  = 0;
666   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
667   for (i=0; i<n; i++) {
668     sends[starts[owner[i]]++] = lindices[i];
669     sends[starts[owner[i]]++] = i;
670   }
671   ierr = PetscFree(owner);CHKERRQ(ierr);
672   starts[0]  = 0;
673   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
674 
675   /* send the messages */
676   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
677   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
678   cnt = 0;
679   for (i=0; i<size; i++) {
680     if (nprocs[2*i]) {
681       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
682       dest[cnt] = i;
683       cnt++;
684     }
685   }
686   ierr = PetscFree(starts);CHKERRQ(ierr);
687 
688   /* wait on receives */
689   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
690   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
691   cnt  = nrecvs;
692   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
693   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
694   while (cnt) {
695     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
696     /* unpack receives into our local space */
697     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
698     source[imdex]  = recv_status.MPI_SOURCE;
699     len[imdex]     = len[imdex]/2;
700     /* count how many local owners for each of my global owned indices */
701     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
702     cnt--;
703   }
704   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
705 
706   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
707   nowned  = 0;
708   nownedm = 0;
709   for (i=0; i<ng; i++) {
710     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
711   }
712 
713   /* create single array to contain rank of all local owners of each globally owned index */
714   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
715   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
716   starts[0] = 0;
717   for (i=1; i<ng; i++) {
718     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
719     else starts[i] = starts[i-1];
720   }
721 
722   /* for each nontrival globally owned node list all arriving processors */
723   for (i=0; i<nrecvs; i++) {
724     for (j=0; j<len[i]; j++) {
725       node = recvs[2*i*nmax+2*j]-rstart;
726       if (nownedsenders[node] > 1) {
727         ownedsenders[starts[node]++] = source[i];
728       }
729     }
730   }
731 
732   if (debug) { /* -----------------------------------  */
733     starts[0]    = 0;
734     for (i=1; i<ng; i++) {
735       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
736       else starts[i] = starts[i-1];
737     }
738     for (i=0; i<ng; i++) {
739       if (nownedsenders[i] > 1) {
740         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
741         for (j=0; j<nownedsenders[i]; j++) {
742           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
743         }
744         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
745       }
746     }
747     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
748   }/* -----------------------------------  */
749 
750   /* wait on original sends */
751   if (nsends) {
752     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
753     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
754     ierr = PetscFree(send_status);CHKERRQ(ierr);
755   }
756   ierr = PetscFree(send_waits);CHKERRQ(ierr);
757   ierr = PetscFree(sends);CHKERRQ(ierr);
758   ierr = PetscFree(nprocs);CHKERRQ(ierr);
759 
760   /* pack messages to send back to local owners */
761   starts[0]    = 0;
762   for (i=1; i<ng; i++) {
763     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
764     else starts[i] = starts[i-1];
765   }
766   nsends2 = nrecvs;
767   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
768   for (i=0; i<nrecvs; i++) {
769     nprocs[i] = 1;
770     for (j=0; j<len[i]; j++) {
771       node = recvs[2*i*nmax+2*j]-rstart;
772       if (nownedsenders[node] > 1) {
773         nprocs[i] += 2 + nownedsenders[node];
774       }
775     }
776   }
777   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
778   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
779   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
780   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
781   /*
782      Each message is 1 + nprocs[i] long, and consists of
783        (0) the number of nodes being sent back
784        (1) the local node number,
785        (2) the number of processors sharing it,
786        (3) the processors sharing it
787   */
788   for (i=0; i<nsends2; i++) {
789     cnt = 1;
790     sends2[starts2[i]] = 0;
791     for (j=0; j<len[i]; j++) {
792       node = recvs[2*i*nmax+2*j]-rstart;
793       if (nownedsenders[node] > 1) {
794         sends2[starts2[i]]++;
795         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
796         sends2[starts2[i]+cnt++] = nownedsenders[node];
797         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
798         cnt += nownedsenders[node];
799       }
800     }
801   }
802 
803   /* receive the message lengths */
804   nrecvs2 = nsends;
805   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
806   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
807   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
808   for (i=0; i<nrecvs2; i++) {
809     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
810   }
811 
812   /* send the message lengths */
813   for (i=0; i<nsends2; i++) {
814     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
815   }
816 
817   /* wait on receives of lens */
818   if (nrecvs2) {
819     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
820     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
821     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
822   }
823   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
824 
825   starts3[0] = 0;
826   nt         = 0;
827   for (i=0; i<nrecvs2-1; i++) {
828     starts3[i+1] = starts3[i] + lens2[i];
829     nt          += lens2[i];
830   }
831   if (nrecvs2) nt += lens2[nrecvs2-1];
832 
833   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
834   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
835   for (i=0; i<nrecvs2; i++) {
836     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
837   }
838 
839   /* send the messages */
840   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
841   for (i=0; i<nsends2; i++) {
842     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
843   }
844 
845   /* wait on receives */
846   if (nrecvs2) {
847     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
848     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
849     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
850   }
851   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
852   ierr = PetscFree(nprocs);CHKERRQ(ierr);
853 
854   if (debug) { /* -----------------------------------  */
855     cnt = 0;
856     for (i=0; i<nrecvs2; i++) {
857       nt = recvs2[cnt++];
858       for (j=0; j<nt; j++) {
859         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
860         for (k=0; k<recvs2[cnt+1]; k++) {
861           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
862         }
863         cnt += 2 + recvs2[cnt+1];
864         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
865       }
866     }
867     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
868   } /* -----------------------------------  */
869 
870   /* count number subdomains for each local node */
871   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
872   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
873   cnt  = 0;
874   for (i=0; i<nrecvs2; i++) {
875     nt = recvs2[cnt++];
876     for (j=0; j<nt; j++) {
877       for (k=0; k<recvs2[cnt+1]; k++) {
878         nprocs[recvs2[cnt+2+k]]++;
879       }
880       cnt += 2 + recvs2[cnt+1];
881     }
882   }
883   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
884   *nproc    = nt;
885   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
886   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
887   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
888   for (i=0;i<nt+1;i++) { (*indices)[i]=PETSC_NULL; }
889   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
890   cnt       = 0;
891   for (i=0; i<size; i++) {
892     if (nprocs[i] > 0) {
893       bprocs[i]        = cnt;
894       (*procs)[cnt]    = i;
895       (*numprocs)[cnt] = nprocs[i];
896       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
897       cnt++;
898     }
899   }
900 
901   /* make the list of subdomains for each nontrivial local node */
902   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
903   cnt  = 0;
904   for (i=0; i<nrecvs2; i++) {
905     nt = recvs2[cnt++];
906     for (j=0; j<nt; j++) {
907       for (k=0; k<recvs2[cnt+1]; k++) {
908         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
909       }
910       cnt += 2 + recvs2[cnt+1];
911     }
912   }
913   ierr = PetscFree(bprocs);CHKERRQ(ierr);
914   ierr = PetscFree(recvs2);CHKERRQ(ierr);
915 
916   /* sort the node indexing by their global numbers */
917   nt = *nproc;
918   for (i=0; i<nt; i++) {
919     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
920     for (j=0; j<(*numprocs)[i]; j++) {
921       tmp[j] = lindices[(*indices)[i][j]];
922     }
923     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
924     ierr = PetscFree(tmp);CHKERRQ(ierr);
925   }
926 
927   if (debug) { /* -----------------------------------  */
928     nt = *nproc;
929     for (i=0; i<nt; i++) {
930       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
931       for (j=0; j<(*numprocs)[i]; j++) {
932         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
933       }
934       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
935     }
936     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
937   } /* -----------------------------------  */
938 
939   /* wait on sends */
940   if (nsends2) {
941     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
942     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
943     ierr = PetscFree(send_status);CHKERRQ(ierr);
944   }
945 
946   ierr = PetscFree(starts3);CHKERRQ(ierr);
947   ierr = PetscFree(dest);CHKERRQ(ierr);
948   ierr = PetscFree(send_waits);CHKERRQ(ierr);
949 
950   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
951   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
952   ierr = PetscFree(starts);CHKERRQ(ierr);
953   ierr = PetscFree(starts2);CHKERRQ(ierr);
954   ierr = PetscFree(lens2);CHKERRQ(ierr);
955 
956   ierr = PetscFree(source);CHKERRQ(ierr);
957   ierr = PetscFree(len);CHKERRQ(ierr);
958   ierr = PetscFree(recvs);CHKERRQ(ierr);
959   ierr = PetscFree(nprocs);CHKERRQ(ierr);
960   ierr = PetscFree(sends2);CHKERRQ(ierr);
961 
962   /* put the information about myself as the first entry in the list */
963   first_procs    = (*procs)[0];
964   first_numprocs = (*numprocs)[0];
965   first_indices  = (*indices)[0];
966   for (i=0; i<*nproc; i++) {
967     if ((*procs)[i] == rank) {
968       (*procs)[0]    = (*procs)[i];
969       (*numprocs)[0] = (*numprocs)[i];
970       (*indices)[0]  = (*indices)[i];
971       (*procs)[i]    = first_procs;
972       (*numprocs)[i] = first_numprocs;
973       (*indices)[i]  = first_indices;
974       break;
975     }
976   }
977   PetscFunctionReturn(0);
978 }
979 
980 #undef __FUNCT__
981 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
982 /*@C
983     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
984 
985     Collective on ISLocalToGlobalMapping
986 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
995 
996     Level: advanced
997 
998 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
999           ISLocalToGlobalMappingGetInfo()
1000 @*/
1001 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1002 {
1003   PetscErrorCode ierr;
1004   PetscInt       i;
1005 
1006   PetscFunctionBegin;
1007   ierr = PetscFree(*procs);CHKERRQ(ierr);
1008   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1009   if (*indices) {
1010     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1011     for (i=1; i<*nproc; i++) {
1012       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1013     }
1014     ierr = PetscFree(*indices);CHKERRQ(ierr);
1015   }
1016   PetscFunctionReturn(0);
1017 }
1018 
1019 #undef __FUNCT__
1020 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1021 /*@C
1022    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1023 
1024    Not Collective
1025 
1026    Input Arguments:
1027 . ltog - local to global mapping
1028 
1029    Output Arguments:
1030 . array - array of indices
1031 
1032    Level: advanced
1033 
1034 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1035 @*/
1036 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1037 {
1038 
1039   PetscFunctionBegin;
1040   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1041   PetscValidPointer(array,2);
1042   *array = ltog->indices;
1043   PetscFunctionReturn(0);
1044 }
1045 
1046 #undef __FUNCT__
1047 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1048 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1050 
1051    Not Collective
1052 
1053    Input Arguments:
1054 + ltog - local to global mapping
1055 - array - array of indices
1056 
1057    Level: advanced
1058 
1059 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1060 @*/
1061 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1062 {
1063 
1064   PetscFunctionBegin;
1065   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1066   PetscValidPointer(array,2);
1067   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1068   *array = PETSC_NULL;
1069   PetscFunctionReturn(0);
1070 }
1071 
1072 #undef __FUNCT__
1073 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1074 /*@C
1075    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1076 
1077    Not Collective
1078 
1079    Input Arguments:
1080 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1081 . n - number of mappings to concatenate
1082 - ltogs - local to global mappings
1083 
1084    Output Arguments:
1085 . ltogcat - new mapping
1086 
1087    Level: advanced
1088 
1089 .seealso: ISLocalToGlobalMappingCreate()
1090 @*/
1091 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1092 {
1093   PetscInt       i,cnt,m,*idx;
1094   PetscErrorCode ierr;
1095 
1096   PetscFunctionBegin;
1097   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1098   if (n > 0) PetscValidPointer(ltogs,3);
1099   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1100   PetscValidPointer(ltogcat,4);
1101   for (cnt=0,i=0; i<n; i++) {
1102     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1103     cnt += m;
1104   }
1105   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1106   for (cnt=0,i=0; i<n; i++) {
1107     const PetscInt *subidx;
1108     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1109     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1110     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1111     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1112     cnt += m;
1113   }
1114   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1115   PetscFunctionReturn(0);
1116 }
1117