xref: /petsc/src/vec/is/utils/isltog.c (revision c9345713c0ceec43ecc24ae48e7f0ad0720dee63)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
11 /*@
12     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
13 
14     Not Collective
15 
16     Input Parameter:
17 .   ltog - local to global mapping
18 
19     Output Parameter:
20 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
21 
22     Level: advanced
23 
24     Concepts: mapping^local to global
25 
26 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
27 @*/
28 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
29 {
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
32   PetscValidIntPointer(n,2);
33   *n = mapping->bs*mapping->n;
34   PetscFunctionReturn(0);
35 }
36 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   mapping - local to global mapping
-   viewer - viewer; if NULL, the default ASCII viewer on the mapping's communicator is used

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* no viewer supplied: fall back to stdout on the mapping's communicator */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  /* only ASCII viewers are supported; any other viewer type is silently ignored */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so each rank's "[rank] local global" lines appear in rank order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
81 
82 #undef __FUNCT__
83 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
84 /*@
85     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
86     ordering and a global parallel ordering.
87 
88     Not collective
89 
90     Input Parameter:
91 .   is - index set containing the global numbers for each local number
92 
93     Output Parameter:
94 .   mapping - new mapping data structure
95 
96     Notes: the block size of the IS determines the block size of the mapping
97     Level: advanced
98 
99     Concepts: mapping^local to global
100 
101 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
102 @*/
103 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
104 {
105   PetscErrorCode ierr;
106   PetscInt       n,bs;
107   const PetscInt *indices;
108   MPI_Comm       comm;
109   PetscBool      isblock;
110 
111   PetscFunctionBegin;
112   PetscValidHeaderSpecific(is,IS_CLASSID,1);
113   PetscValidPointer(mapping,2);
114 
115   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
116   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
117   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
118   if (!isblock) {
119     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
121     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
122   } else {
123     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
124     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
126     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
127   }
128   PetscFunctionReturn(0);
129 }
130 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  if (ilocal) {
    /* explicit leaf locations: local space must cover the largest leaf index */
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves; /* NULL ilocal means leaves are contiguous 0..nleaves-1 */
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  /* each locally owned root gets a consecutive global number beginning at start */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* pre-fill with -1 so local slots not reached by any leaf stay marked "no global index" */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership transfers to the mapping (PETSC_OWN_POINTER); only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
179 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingSetBlockSize"
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid,*oid;  /* new and old block-index arrays */
  PetscInt       i,cn,on,obs,nn,cum;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  /* total unblocked length on*obs must be divisible by the new block size */
  nn  = (on*obs)/bs;
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* Walk the old block indices, emitting one new block index whenever a full new
     block boundary is reached.  cn counts consecutive negative (unset) old entries;
     a run of exactly bs negatives collapses into a single new -1 entry. */
  for (i=0,cum=0,cn=0;i<on;i++) {
    if (oid[i] < 0) cn++;
    /* emit either after bs negatives, or at a nonnegative old index that lands on a
       new-block boundary (its unblocked offset oid[i]*obs divides evenly by bs) */
    if (cn == bs || (oid[i] > -1 && !((oid[i]*obs)%bs))) {
      if (cum == nn) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices",bs,obs);
      /* a partial run of negatives (0 < cn < bs) cannot form a whole new block */
      if (cn && cn != bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices",bs,obs);
      if (cn) nid[cum++] = -1;
      else nid[cum++] = (oid[i]*obs)/bs; /* rescale old block index to the new blocking */
      cn = 0;
    }
  }
  /* every new slot must be filled and no negatives may be left over */
  if (cum != nn || cn) {
    ierr = PetscFree(nid);CHKERRQ(ierr);
    SETERRQ5(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Incompatible block sizes %D and %D (new block indices found %D != %D, neg %d)",bs,obs,cum,nn,cn);
  }
  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  /* the cached global-to-local table is invalidated by the re-blocking */
  ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  PetscFunctionReturn(0);
}
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
239 /*@
240     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
241     ordering and a global parallel ordering.
242 
243     Not Collective
244 
245     Input Parameters:
246 .   mapping - mapping data structure
247 
248     Output Parameter:
249 .   bs - the blocksize
250 
251     Level: advanced
252 
253     Concepts: mapping^local to global
254 
255 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
256 @*/
257 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
258 {
259   PetscFunctionBegin;
260   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
261   *bs = mapping->bs;
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
267 /*@
268     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
269     ordering and a global parallel ordering.
270 
271     Not Collective, but communicator may have more than one process
272 
273     Input Parameters:
274 +   comm - MPI communicator
275 .   bs - the block size
276 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
277 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
278 -   mode - see PetscCopyMode
279 
280     Output Parameter:
281 .   mapping - new mapping data structure
282 
283     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
284     Level: advanced
285 
286     Concepts: mapping^local to global
287 
288 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
289 @*/
290 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
291 {
292   PetscErrorCode ierr;
293   PetscInt       *in;
294 
295   PetscFunctionBegin;
296   if (n) PetscValidIntPointer(indices,3);
297   PetscValidPointer(mapping,4);
298 
299   *mapping = NULL;
300   ierr = ISInitializePackage();CHKERRQ(ierr);
301 
302   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
303                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
304   (*mapping)->n             = n;
305   (*mapping)->bs            = bs;
306   (*mapping)->info_cached   = PETSC_FALSE;
307   (*mapping)->info_free     = PETSC_FALSE;
308   (*mapping)->info_procs    = NULL;
309   (*mapping)->info_numprocs = NULL;
310   (*mapping)->info_indices  = NULL;
311   /*
312     Do not create the global to local mapping. This is only created if
313     ISGlobalToLocalMapping() is called
314   */
315   (*mapping)->globals = 0;
316   if (mode == PETSC_COPY_VALUES) {
317     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
318     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
319     (*mapping)->indices = in;
320     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
321   } else if (mode == PETSC_OWN_POINTER) {
322     (*mapping)->indices = (PetscInt*)indices;
323     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
324   }
325   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
326   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
327   PetscFunctionReturn(0);
328 }
329 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the data; earlier ones just NULL the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is always allocated (even when info_nproc is 0); free it first, then the rest */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
370 
371 #undef __FUNCT__
372 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
373 /*@
374     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
375     a new index set using the global numbering defined in an ISLocalToGlobalMapping
376     context.
377 
378     Not collective
379 
380     Input Parameters:
381 +   mapping - mapping between local and global numbering
382 -   is - index set in local numbering
383 
384     Output Parameters:
385 .   newis - index set in global numbering
386 
387     Level: advanced
388 
389     Concepts: mapping^local to global
390 
391 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
392           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
393 @*/
394 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
395 {
396   PetscErrorCode ierr;
397   PetscInt       n,*idxout;
398   const PetscInt *idxin;
399 
400   PetscFunctionBegin;
401   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
402   PetscValidHeaderSpecific(is,IS_CLASSID,2);
403   PetscValidPointer(newis,3);
404 
405   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
406   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
407   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
408   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
409   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
410   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
411   PetscFunctionReturn(0);
412 }
413 
414 #undef __FUNCT__
415 #define __FUNCT__ "ISLocalToGlobalMappingApply"
416 /*@
417    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
418    and converts them to the global numbering.
419 
420    Not collective
421 
422    Input Parameters:
423 +  mapping - the local to global mapping context
424 .  N - number of integers
425 -  in - input indices in local numbering
426 
427    Output Parameter:
428 .  out - indices in global numbering
429 
430    Notes:
431    The in and out array parameters may be identical.
432 
433    Level: advanced
434 
435 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
436           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
437           AOPetscToApplication(), ISGlobalToLocalMappingApply()
438 
439     Concepts: mapping^local to global
440 @*/
441 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
442 {
443   PetscInt i,bs,Nmax;
444 
445   PetscFunctionBegin;
446   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
447   bs   = mapping->bs;
448   Nmax = bs*mapping->n;
449   if (bs == 1) {
450     const PetscInt *idx = mapping->indices;
451     for (i=0; i<N; i++) {
452       if (in[i] < 0) {
453         out[i] = in[i];
454         continue;
455       }
456       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
457       out[i] = idx[in[i]];
458     }
459   } else {
460     const PetscInt *idx = mapping->indices;
461     for (i=0; i<N; i++) {
462       if (in[i] < 0) {
463         out[i] = in[i];
464         continue;
465       }
466       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
467       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
468     }
469   }
470   PetscFunctionReturn(0);
471 }
472 
473 #undef __FUNCT__
474 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
475 /*@
476    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
477 
478    Not collective
479 
480    Input Parameters:
481 +  mapping - the local to global mapping context
482 .  N - number of integers
483 -  in - input indices in local block numbering
484 
485    Output Parameter:
486 .  out - indices in global block numbering
487 
488    Notes:
489    The in and out array parameters may be identical.
490 
491    Example:
492      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
493      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
494 
495    Level: advanced
496 
497 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
498           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
499           AOPetscToApplication(), ISGlobalToLocalMappingApply()
500 
501     Concepts: mapping^local to global
502 @*/
503 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
504 {
505 
506   PetscFunctionBegin;
507   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
508   {
509     PetscInt i,Nmax = mapping->n;
510     const PetscInt *idx = mapping->indices;
511 
512     for (i=0; i<N; i++) {
513       if (in[i] < 0) {
514         out[i] = in[i];
515         continue;
516       }
517       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
518       out[i] = idx[in[i]];
519     }
520   }
521   PetscFunctionReturn(0);
522 }
523 
524 /* -----------------------------------------------------------------------------------------*/
525 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a dense lookup table globals[] over the range [globalstart,globalend] such
    that globals[g - globalstart] is the local block index owning global block g,
    or -1 if no local block maps to g.  Memory use is O(globalend - globalstart).
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min and max nonnegative global block indices; negatives are "unset" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;} /* no valid indices: normalize to an empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* end-start+2 (not +1) so the allocation is nonzero even for the empty range */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
560 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                            PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global-to-local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;  /* table covers block range [start,end] */
  end     = mapping->globalend;
  bs      = mapping->bs;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        /* inputs are unblocked indices; the table is indexed by block, hence the /bs and %bs */
        if (idx[i] < 0)                   idxout[i] = idx[i];
        else if (idx[i] < bs*start)       idxout[i] = -1;
        else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
        else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        /* NOTE(review): when globals[] holds -1 (in-range but unowned) and bs > 1, the masked
           output above is bs*(-1)+offset, i.e. negative but not necessarily -1 — confirm intended */
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: skip anything negative, out of range, or unowned (tmp < 0) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == NULL): same filter, no stores */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
650 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
/*@
    ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
    a new index set using the local numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
-   is - index set in global numbering

    Output Parameters:
.   newis - index set in local numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* MASK keeps one output per input, so n entries always suffice */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* DROP may shrink the list: first pass (idxout == NULL) just counts */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
698 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
/*@
    ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
    specified with a block global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global-to-local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;  /* table covers block range [start,end] */
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        /* inputs here are already block indices, so the table is indexed directly */
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: skip anything negative, out of range, or unowned (tmp < 0) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == NULL): same filter, no stores */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
787 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
/*@C
    ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_cached) {
    /* results from a previous call are cached on the mapping; hand out the cached arrays */
    *nproc = mapping->info_nproc;
    *procs = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices = mapping->info_indices;
  } else {
    /* first call: compute the neighbor information (communicates) */
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
836 
837 #undef __FUNCT__
838 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
/*
   Computes, for each block of this mapping, which other MPI ranks share it and
   which local blocks are shared; the result is cached on the mapping.  The
   algorithm is a three-phase rendezvous: (1) each rank sends its local indices
   to a temporary "global owner" rank chosen by range partitioning, (2) the
   owners detect indices arriving from more than one rank and send the sharer
   lists back, (3) each rank reorganizes the replies per neighboring rank.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor case: no neighbors; allocate length-one arrays so that
       ISLocalToGlobalMappingRestoreBlockInfo() can free them uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* three distinct tags, one per communication phase, so messages cannot be confused */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = (max global index) + 1; global indices are then block-partitioned across ranks */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners; each entry is a
     (global index, local index) pair, hence the stride of 2 */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop advanced starts[]; rebuild it before the sends */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives; messages may arrive in any order, so record sender and length per slot */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was advanced by the fill loop above; recompute it for printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* phase 2: each reply goes back to the rank that sent us indices in phase 1 */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* compute receive displacements and the total reply length nt */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  /* nt = number of distinct neighboring subdomains */
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[rank] = position of that rank in the compressed procs[] list */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1248 
1249 #undef __FUNCT__
1250 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1251 /*@C
1252     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1253 
1254     Collective on ISLocalToGlobalMapping
1255 
1256     Input Parameters:
1257 .   mapping - the mapping from local to global indexing
1258 
    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1264 
1265     Level: advanced
1266 
1267 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1268           ISLocalToGlobalMappingGetInfo()
1269 @*/
1270 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1271 {
1272   PetscErrorCode ierr;
1273 
1274   PetscFunctionBegin;
1275   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1276   if (mapping->info_free) {
1277     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1278     if (*indices) {
1279       PetscInt i;
1280 
1281       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1282       for (i=1; i<*nproc; i++) {
1283         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1284       }
1285       ierr = PetscFree(*indices);CHKERRQ(ierr);
1286     }
1287   }
1288   *nproc = 0;
1289   *procs = NULL;
1290   *numprocs = NULL;
1291   *indices = NULL;
1292   PetscFunctionReturn(0);
1293 }
1294 
1295 #undef __FUNCT__
1296 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1297 /*@C
1298     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1299      each index shared by more than one processor
1300 
1301     Collective on ISLocalToGlobalMapping
1302 
1303     Input Parameters:
1304 .   mapping - the mapping from local to global indexing
1305 
    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1311 
1312     Level: advanced
1313 
1314     Concepts: mapping^local to global
1315 
1316     Fortran Usage:
1317 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1318 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1319           PetscInt indices[nproc][numprocmax],ierr)
1320         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1321         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1322 
1323 
1324 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1325           ISLocalToGlobalMappingRestoreInfo()
1326 @*/
1327 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1328 {
1329   PetscErrorCode ierr;
1330   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1331 
1332   PetscFunctionBegin;
1333   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1334   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1335   if (bs > 1) { /* we need to expand the cached info */
1336     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1337     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1338     for (i=0; i<*nproc; i++) {
1339       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1340       for (j=0; j<bnumprocs[i]; j++) {
1341         for (k=0; k<bs; k++) {
1342           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1343         }
1344       }
1345       (*numprocs)[i] = bnumprocs[i]*bs;
1346     }
1347     mapping->info_free = PETSC_TRUE;
1348   } else {
1349     *numprocs = bnumprocs;
1350     *indices  = bindices;
1351   }
1352   PetscFunctionReturn(0);
1353 }
1354 
1355 #undef __FUNCT__
1356 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1357 /*@C
1358     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1359 
1360     Collective on ISLocalToGlobalMapping
1361 
1362     Input Parameters:
1363 .   mapping - the mapping from local to global indexing
1364 
    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1370 
1371     Level: advanced
1372 
1373 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1374           ISLocalToGlobalMappingGetInfo()
1375 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the point info either aliases the cached block info (bs == 1) or was marked
     with mapping->info_free when expanded (bs > 1); the block restore handles both */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1384 
1385 #undef __FUNCT__
1386 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1387 /*@C
1388    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1389 
1390    Not Collective
1391 
1392    Input Arguments:
1393 . ltog - local to global mapping
1394 
1395    Output Arguments:
1396 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1397 
1398    Level: advanced
1399 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1401 
1402 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1403 @*/
1404 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1405 {
1406   PetscFunctionBegin;
1407   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1408   PetscValidPointer(array,2);
1409   if (ltog->bs == 1) {
1410     *array = ltog->indices;
1411   } else {
1412     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1413     const PetscInt *ii;
1414     PetscErrorCode ierr;
1415 
1416     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1417     *array = jj;
1418     k    = 0;
1419     ii   = ltog->indices;
1420     for (i=0; i<n; i++)
1421       for (j=0; j<bs; j++)
1422         jj[k++] = bs*ii[i] + j;
1423   }
1424   PetscFunctionReturn(0);
1425 }
1426 
1427 #undef __FUNCT__
1428 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1429 /*@C
1430    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1431 
1432    Not Collective
1433 
1434    Input Arguments:
1435 + ltog - local to global mapping
1436 - array - array of indices
1437 
1438    Level: advanced
1439 
1440 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1441 @*/
1442 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1443 {
1444   PetscFunctionBegin;
1445   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1446   PetscValidPointer(array,2);
1447   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1448 
1449   if (ltog->bs > 1) {
1450     PetscErrorCode ierr;
1451     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1452   }
1453   PetscFunctionReturn(0);
1454 }
1455 
1456 #undef __FUNCT__
1457 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1458 /*@C
1459    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1460 
1461    Not Collective
1462 
1463    Input Arguments:
1464 . ltog - local to global mapping
1465 
1466    Output Arguments:
1467 . array - array of indices
1468 
1469    Level: advanced
1470 
1471 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1472 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* direct (borrowed) reference to the mapping's internal block-index storage;
     the caller must not modify or free it and must return the same pointer to
     ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1481 
1482 #undef __FUNCT__
1483 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1484 /*@C
1485    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1486 
1487    Not Collective
1488 
1489    Input Arguments:
1490 + ltog - local to global mapping
1491 - array - array of indices
1492 
1493    Level: advanced
1494 
1495 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1496 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get handed out the internal array itself; anything else indicates a
     mismatched Get/Restore pair */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* nothing to free (the array is borrowed); just invalidate the caller's handle */
  *array = NULL;
  PetscFunctionReturn(0);
}
1506 
1507 #undef __FUNCT__
1508 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1509 /*@C
1510    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1511 
1512    Not Collective
1513 
1514    Input Arguments:
1515 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1516 . n - number of mappings to concatenate
1517 - ltogs - local to global mappings
1518 
1519    Output Arguments:
1520 . ltogcat - new mapping
1521 
1522    Note: this currently always returns a mapping with block size of 1
1523 
1524    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1525 
1526    Level: advanced
1527 
1528 .seealso: ISLocalToGlobalMappingCreate()
1529 @*/
1530 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1531 {
1532   PetscInt       i,cnt,m,*idx;
1533   PetscErrorCode ierr;
1534 
1535   PetscFunctionBegin;
1536   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1537   if (n > 0) PetscValidPointer(ltogs,3);
1538   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1539   PetscValidPointer(ltogcat,4);
1540   for (cnt=0,i=0; i<n; i++) {
1541     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1542     cnt += m;
1543   }
1544   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1545   for (cnt=0,i=0; i<n; i++) {
1546     const PetscInt *subidx;
1547     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1548     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1549     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1550     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1551     cnt += m;
1552   }
1553   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1554   PetscFunctionReturn(0);
1555 }
1556 
1557 
1558