xref: /petsc/src/vec/is/utils/isltog.c (revision 63fa5c835b8b49e9f15e3fd165175a1a102ac62d)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
11 /*@
12     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
13 
14     Not Collective
15 
16     Input Parameter:
17 .   ltog - local to global mapping
18 
19     Output Parameter:
20 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
21 
22     Level: advanced
23 
24     Concepts: mapping^local to global
25 
26 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
27 @*/
28 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
29 {
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
32   PetscValidIntPointer(n,2);
33   *n = mapping->bs*mapping->n;
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "ISLocalToGlobalMappingView"
39 /*@C
40     ISLocalToGlobalMappingView - View a local to global mapping
41 
42     Not Collective
43 
44     Input Parameters:
45 +   ltog - local to global mapping
46 -   viewer - viewer
47 
48     Level: advanced
49 
50     Concepts: mapping^local to global
51 
52 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
53 @*/
54 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
55 {
56   PetscInt       i;
57   PetscMPIInt    rank;
58   PetscBool      iascii;
59   PetscErrorCode ierr;
60 
61   PetscFunctionBegin;
62   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
63   if (!viewer) {
64     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
65   }
66   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
67 
68   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
69   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
70   if (iascii) {
71     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
72     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
73     for (i=0; i<mapping->n; i++) {
74       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
75     }
76     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
77     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
78   }
79   PetscFunctionReturn(0);
80 }
81 
82 #undef __FUNCT__
83 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
84 /*@
85     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
86     ordering and a global parallel ordering.
87 
88     Not collective
89 
90     Input Parameter:
91 .   is - index set containing the global numbers for each local number
92 
93     Output Parameter:
94 .   mapping - new mapping data structure
95 
96     Notes: the block size of the IS determines the block size of the mapping
97     Level: advanced
98 
99     Concepts: mapping^local to global
100 
101 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
102 @*/
103 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
104 {
105   PetscErrorCode ierr;
106   PetscInt       n,bs;
107   const PetscInt *indices;
108   MPI_Comm       comm;
109   PetscBool      isblock;
110 
111   PetscFunctionBegin;
112   PetscValidHeaderSpecific(is,IS_CLASSID,1);
113   PetscValidPointer(mapping,2);
114 
115   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
116   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
117   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
118   if (!isblock) {
119     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
121     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
122   } else {
123     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
124     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
126     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
127   }
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
133 /*@C
134     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
135     ordering and a global parallel ordering.
136 
137     Collective
138 
    Input Parameters:
140 +   sf - star forest mapping contiguous local indices to (rank, offset)
141 -   start - first global index on this process
142 
143     Output Parameter:
144 .   mapping - new mapping data structure
145 
146     Level: advanced
147 
148     Concepts: mapping^local to global
149 
150 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
151 @*/
152 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
153 {
154   PetscErrorCode ierr;
155   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
156   const PetscInt *ilocal;
157   MPI_Comm       comm;
158 
159   PetscFunctionBegin;
160   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
161   PetscValidPointer(mapping,3);
162 
163   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
164   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
165   if (ilocal) {
166     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
167   }
168   else maxlocal = nleaves;
169   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
170   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
171   for (i=0; i<nroots; i++) globals[i] = start + i;
172   for (i=0; i<maxlocal; i++) ltog[i] = -1;
173   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
174   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
175   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
176   ierr = PetscFree(globals);CHKERRQ(ierr);
177   PetscFunctionReturn(0);
178 }
179 
180 #undef __FUNCT__
181 #define __FUNCT__ "ISLocalToGlobalMappingSetBlockSize"
182 /*@
183     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
184 
185     Not collective
186 
187     Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize
190 
191     Level: advanced
192 
193     Concepts: mapping^local to global
194 
195 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
196 @*/
197 PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
198 {
199   PetscInt       *nid,*oid;
200   PetscInt       i,on,obs,nn,cum;
201   PetscErrorCode ierr;
202 
203   PetscFunctionBegin;
204   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
205   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
206   if (bs == mapping->bs) PetscFunctionReturn(0);
207   on  = mapping->n;
208   obs = mapping->bs;
209   oid = mapping->indices;
210   nn  = (on*obs)/bs;
211   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
212   ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
213   for (i=0,cum=0;i<on;i++) {
214     if (!((oid[i]*obs)%bs)) {
215       if (cum==nn) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices",bs,obs);
216       nid[cum++] = (oid[i]*obs)/bs;
217     }
218   }
219   if (cum != nn) {
220     ierr = PetscFree(nid);CHKERRQ(ierr);
221     SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Incompatible block sizes %D and %D (new block indices found %D != %D)",bs,obs,cum,nn);
222   }
223   mapping->n       = nn;
224   mapping->bs      = bs;
225   ierr             = PetscFree(mapping->indices);CHKERRQ(ierr);
226   mapping->indices = nid;
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
232 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
235 
236     Not Collective
237 
238     Input Parameters:
239 .   mapping - mapping data structure
240 
241     Output Parameter:
242 .   bs - the blocksize
243 
244     Level: advanced
245 
246     Concepts: mapping^local to global
247 
248 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
249 @*/
250 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
251 {
252   PetscFunctionBegin;
253   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
254   *bs = mapping->bs;
255   PetscFunctionReturn(0);
256 }
257 
258 #undef __FUNCT__
259 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
260 /*@
261     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
262     ordering and a global parallel ordering.
263 
264     Not Collective, but communicator may have more than one process
265 
266     Input Parameters:
267 +   comm - MPI communicator
268 .   bs - the block size
269 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
270 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
271 -   mode - see PetscCopyMode
272 
273     Output Parameter:
274 .   mapping - new mapping data structure
275 
276     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
277     Level: advanced
278 
279     Concepts: mapping^local to global
280 
281 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
282 @*/
283 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
284 {
285   PetscErrorCode ierr;
286   PetscInt       *in;
287 
288   PetscFunctionBegin;
289   if (n) PetscValidIntPointer(indices,3);
290   PetscValidPointer(mapping,4);
291 
292   *mapping = NULL;
293   ierr = ISInitializePackage();CHKERRQ(ierr);
294 
295   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
296                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
297   (*mapping)->n             = n;
298   (*mapping)->bs            = bs;
299   (*mapping)->info_cached   = PETSC_FALSE;
300   (*mapping)->info_free     = PETSC_FALSE;
301   (*mapping)->info_procs    = NULL;
302   (*mapping)->info_numprocs = NULL;
303   (*mapping)->info_indices  = NULL;
304   /*
305     Do not create the global to local mapping. This is only created if
306     ISGlobalToLocalMapping() is called
307   */
308   (*mapping)->globals = 0;
309   if (mode == PETSC_COPY_VALUES) {
310     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
311     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
312     (*mapping)->indices = in;
313     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
314   } else if (mode == PETSC_OWN_POINTER) {
315     (*mapping)->indices = (PetscInt*)indices;
316     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
317   }
318   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
319   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
320   PetscFunctionReturn(0);
321 }
322 
323 #undef __FUNCT__
324 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
325 /*@
326    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
327    ordering and a global parallel ordering.
328 
   Not Collective
330 
331    Input Parameters:
332 .  mapping - mapping data structure
333 
334    Level: advanced
335 
336 .seealso: ISLocalToGlobalMappingCreate()
337 @*/
/* Releases all storage owned by the mapping and, once the reference count
   drops to zero, destroys the object header itself. */
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0); /* destroying a NULL mapping is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* other references still exist: just drop the caller's handle */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);       /* lazily-built global-to-local table, may be NULL */
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);    /* cached neighbor info, may be NULL */
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed unconditionally because the serial cache path stores a
       NULL placeholder there while info_nproc is 0 (so the loop below, which
       starts at 1, would never reach it) */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
366 /*@
367     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
368     a new index set using the global numbering defined in an ISLocalToGlobalMapping
369     context.
370 
371     Not collective
372 
373     Input Parameters:
374 +   mapping - mapping between local and global numbering
375 -   is - index set in local numbering
376 
377     Output Parameters:
378 .   newis - index set in global numbering
379 
380     Level: advanced
381 
382     Concepts: mapping^local to global
383 
384 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
385           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
386 @*/
387 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
388 {
389   PetscErrorCode ierr;
390   PetscInt       n,*idxout;
391   const PetscInt *idxin;
392 
393   PetscFunctionBegin;
394   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
395   PetscValidHeaderSpecific(is,IS_CLASSID,2);
396   PetscValidPointer(newis,3);
397 
398   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
399   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
400   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
401   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
402   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
403   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
404   PetscFunctionReturn(0);
405 }
406 
407 #undef __FUNCT__
408 #define __FUNCT__ "ISLocalToGlobalMappingApply"
409 /*@
410    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
411    and converts them to the global numbering.
412 
413    Not collective
414 
415    Input Parameters:
416 +  mapping - the local to global mapping context
417 .  N - number of integers
418 -  in - input indices in local numbering
419 
420    Output Parameter:
421 .  out - indices in global numbering
422 
423    Notes:
424    The in and out array parameters may be identical.
425 
426    Level: advanced
427 
428 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
429           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
430           AOPetscToApplication(), ISGlobalToLocalMappingApply()
431 
432     Concepts: mapping^local to global
433 @*/
434 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
435 {
436   PetscInt i,bs,Nmax;
437 
438   PetscFunctionBegin;
439   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
440   bs   = mapping->bs;
441   Nmax = bs*mapping->n;
442   if (bs == 1) {
443     const PetscInt *idx = mapping->indices;
444     for (i=0; i<N; i++) {
445       if (in[i] < 0) {
446         out[i] = in[i];
447         continue;
448       }
449       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
450       out[i] = idx[in[i]];
451     }
452   } else {
453     const PetscInt *idx = mapping->indices;
454     for (i=0; i<N; i++) {
455       if (in[i] < 0) {
456         out[i] = in[i];
457         continue;
458       }
459       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
460       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
461     }
462   }
463   PetscFunctionReturn(0);
464 }
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
468 /*@
469    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
470 
471    Not collective
472 
473    Input Parameters:
474 +  mapping - the local to global mapping context
475 .  N - number of integers
476 -  in - input indices in local block numbering
477 
478    Output Parameter:
479 .  out - indices in global block numbering
480 
481    Notes:
482    The in and out array parameters may be identical.
483 
484    Example:
485      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
486      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
487 
488    Level: advanced
489 
490 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
491           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
492           AOPetscToApplication(), ISGlobalToLocalMappingApply()
493 
494     Concepts: mapping^local to global
495 @*/
496 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
497 {
498 
499   PetscFunctionBegin;
500   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
501   {
502     PetscInt i,Nmax = mapping->n;
503     const PetscInt *idx = mapping->indices;
504 
505     for (i=0; i<N; i++) {
506       if (in[i] < 0) {
507         out[i] = in[i];
508         continue;
509       }
510       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
511       out[i] = idx[in[i]];
512     }
513   }
514   PetscFunctionReturn(0);
515 }
516 
517 /* -----------------------------------------------------------------------------------------*/
518 
519 #undef __FUNCT__
520 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
521 /*
522     Creates the global fields in the ISLocalToGlobalMapping structure
523 */
/* Builds the dense lookup table mapping->globals[] of length
   (globalend-globalstart+1), with globals[g - globalstart] set to the local
   block index whose global block index is g, or -1 for globals in the range
   that are not present locally. */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the smallest and largest nonnegative global index; negatives are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: collapse to the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* +2 rather than +1 -- presumably to keep the allocation nonempty when the
     range is empty (end-start+1 == 0); TODO confirm */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  /* if a global index occurs more than once, the later local index wins */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
553 
554 #undef __FUNCT__
555 #define __FUNCT__ "ISGlobalToLocalMappingApply"
556 /*@
557     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
558     specified with a global numbering.
559 
560     Not collective
561 
562     Input Parameters:
563 +   mapping - mapping between local and global numbering
564 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
565            IS_GTOLM_DROP - drops the indices with no local value from the output list
566 .   n - number of global indices to map
567 -   idx - global indices to map
568 
569     Output Parameters:
570 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
571 -   idxout - local index of each global index, one must pass in an array long enough
572              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
573              idxout == NULL to determine the required length (returned in nout)
574              and then allocate the required space and call ISGlobalToLocalMappingApply()
575              a second time to set the values.
576 
577     Notes:
578     Either nout or idxout may be NULL. idx and idxout may be identical.
579 
580     This is not scalable in memory usage. Each processor requires O(Nglobal) size
581     array to compute these.
582 
583     Level: advanced
584 
585     Developer Note: The manual page states that idx and idxout may be identical but the calling
586        sequence declares idx as const so it cannot be the same as idxout.
587 
588     Concepts: mapping^global to local
589 
590 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
591           ISLocalToGlobalMappingDestroy()
592 @*/
593 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
594                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
595 {
596   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
597   PetscErrorCode ierr;
598 
599   PetscFunctionBegin;
600   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
601   if (!mapping->globals) {
602     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
603   }
604   globals = mapping->globals;
605   start   = mapping->globalstart;
606   end     = mapping->globalend;
607   bs      = mapping->bs;
608 
609   if (type == IS_GTOLM_MASK) {
610     if (idxout) {
611       for (i=0; i<n; i++) {
612         if (idx[i] < 0)                   idxout[i] = idx[i];
613         else if (idx[i] < bs*start)       idxout[i] = -1;
614         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
615         else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
616       }
617     }
618     if (nout) *nout = n;
619   } else {
620     if (idxout) {
621       for (i=0; i<n; i++) {
622         if (idx[i] < 0) continue;
623         if (idx[i] < bs*start) continue;
624         if (idx[i] > bs*(end+1)-1) continue;
625         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
626         if (tmp < 0) continue;
627         idxout[nf++] = tmp;
628       }
629     } else {
630       for (i=0; i<n; i++) {
631         if (idx[i] < 0) continue;
632         if (idx[i] < bs*start) continue;
633         if (idx[i] > bs*(end+1)-1) continue;
634         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
635         if (tmp < 0) continue;
636         nf++;
637       }
638     }
639     if (nout) *nout = nf;
640   }
641   PetscFunctionReturn(0);
642 }
643 
644 #undef __FUNCT__
645 #define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
646 /*@
647     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
648     a new index set using the local numbering defined in an ISLocalToGlobalMapping
649     context.
650 
651     Not collective
652 
653     Input Parameters:
654 +   mapping - mapping between local and global numbering
655 -   is - index set in global numbering
656 
657     Output Parameters:
658 .   newis - index set in local numbering
659 
660     Level: advanced
661 
662     Concepts: mapping^local to global
663 
664 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
665           ISLocalToGlobalMappingDestroy()
666 @*/
667 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
668 {
669   PetscErrorCode ierr;
670   PetscInt       n,nout,*idxout;
671   const PetscInt *idxin;
672 
673   PetscFunctionBegin;
674   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
675   PetscValidHeaderSpecific(is,IS_CLASSID,3);
676   PetscValidPointer(newis,4);
677 
678   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
679   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
680   if (type == IS_GTOLM_MASK) {
681     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
682   } else {
683     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
684     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
685   }
686   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
687   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
688   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
689   PetscFunctionReturn(0);
690 }
691 
692 #undef __FUNCT__
693 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
694 /*@
695     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
696     specified with a block global numbering.
697 
698     Not collective
699 
700     Input Parameters:
701 +   mapping - mapping between local and global numbering
702 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
703            IS_GTOLM_DROP - drops the indices with no local value from the output list
704 .   n - number of global indices to map
705 -   idx - global indices to map
706 
707     Output Parameters:
708 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
709 -   idxout - local index of each global index, one must pass in an array long enough
710              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
711              idxout == NULL to determine the required length (returned in nout)
712              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
713              a second time to set the values.
714 
715     Notes:
716     Either nout or idxout may be NULL. idx and idxout may be identical.
717 
718     This is not scalable in memory usage. Each processor requires O(Nglobal) size
719     array to compute these.
720 
721     Level: advanced
722 
723     Developer Note: The manual page states that idx and idxout may be identical but the calling
724        sequence declares idx as const so it cannot be the same as idxout.
725 
726     Concepts: mapping^global to local
727 
728 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
729           ISLocalToGlobalMappingDestroy()
730 @*/
731 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
732                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
733 {
734   PetscInt       i,*globals,nf = 0,tmp,start,end;
735   PetscErrorCode ierr;
736 
737   PetscFunctionBegin;
738   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
739   if (!mapping->globals) {
740     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
741   }
742   globals = mapping->globals;
743   start   = mapping->globalstart;
744   end     = mapping->globalend;
745 
746   if (type == IS_GTOLM_MASK) {
747     if (idxout) {
748       for (i=0; i<n; i++) {
749         if (idx[i] < 0) idxout[i] = idx[i];
750         else if (idx[i] < start) idxout[i] = -1;
751         else if (idx[i] > end)   idxout[i] = -1;
752         else                     idxout[i] = globals[idx[i] - start];
753       }
754     }
755     if (nout) *nout = n;
756   } else {
757     if (idxout) {
758       for (i=0; i<n; i++) {
759         if (idx[i] < 0) continue;
760         if (idx[i] < start) continue;
761         if (idx[i] > end) continue;
762         tmp = globals[idx[i] - start];
763         if (tmp < 0) continue;
764         idxout[nf++] = tmp;
765       }
766     } else {
767       for (i=0; i<n; i++) {
768         if (idx[i] < 0) continue;
769         if (idx[i] < start) continue;
770         if (idx[i] > end) continue;
771         tmp = globals[idx[i] - start];
772         if (tmp < 0) continue;
773         nf++;
774       }
775     }
776     if (nout) *nout = nf;
777   }
778   PetscFunctionReturn(0);
779 }
780 
781 #undef __FUNCT__
782 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
783 /*@C
784     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
785      each index shared by more than one processor
786 
787     Collective on ISLocalToGlobalMapping
788 
789     Input Parameters:
790 .   mapping - the mapping from local to global indexing
791 
792     Output Parameter:
793 +   nproc - number of processors that are connected to this one
794 .   proc - neighboring processors
795 .   numproc - number of indices for each subdomain (processor)
796 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
797 
798     Level: advanced
799 
800     Concepts: mapping^local to global
801 
802     Fortran Usage:
803 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
804 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
805           PetscInt indices[nproc][numprocmax],ierr)
806         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
807         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
808 
809 
810 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
811           ISLocalToGlobalMappingRestoreInfo()
812 @*/
813 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
814 {
815   PetscErrorCode ierr;
816 
817   PetscFunctionBegin;
818   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
819   if (mapping->info_cached) {
820     *nproc = mapping->info_nproc;
821     *procs = mapping->info_procs;
822     *numprocs = mapping->info_numprocs;
823     *indices = mapping->info_indices;
824   } else {
825     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
826   }
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
/*
   Computes, for every neighboring process, the list of local block indices shared
   with that neighbor.  Implemented as a three-phase MPI rendezvous:
     1. each rank sends its local indices to a "global owner" rank chosen by an
        even partition of the global index range (tag1);
     2. each global owner tallies, for every index it owns, which ranks hold it,
        and sends the sharer lists back (lengths on tag2, payload on tag3);
     3. each rank rebuilds per-neighbor index lists from the returned payload.
   The result is cached on the mapping (info_*) so repeat calls are free.
   Collective; must be called by all ranks of the mapping's communicator.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; still allocate one-entry numprocs/indices so
       callers (and the Fortran interface) can index entry 0 */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* three distinct tags keep the three communication rounds from interfering */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  /* Ng = one past the largest global index anywhere; defines the ownership range */
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank "globally owns" a contiguous slab of scale indices starting at rstart;
     the last rank gets the (possibly shorter) remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = #indices owned by rank p; nprocs[2*p+1] = 1 iff we message rank p */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message is at most 2*nmax ints: (global index, local position) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop advanced starts[]; rebuild it as message offsets for the sends */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  /* nownedsenders[g] = number of ranks that hold globally-owned index rstart+g */
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was consumed above; rebuild before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* rebuild starts[] once more (it was advanced while filling ownedsenders) */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* reply to exactly the ranks we received from in phase 1 */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  /* every rank we sent to in phase 1 (dest[]) now sends one reply back */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* prefix-sum the lengths into receive offsets; nt = total payload size */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs (reused) now counts, per rank, how many shared nodes it has with us */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[rank] = compressed slot of that rank in the output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1241 
1242 #undef __FUNCT__
1243 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1244 /*@C
1245     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1246 
1247     Collective on ISLocalToGlobalMapping
1248 
1249     Input Parameters:
1250 .   mapping - the mapping from local to global indexing
1251 
1252     Output Parameter:
1253 +   nproc - number of processors that are connected to this one
1254 .   proc - neighboring processors
1255 .   numproc - number of indices for each processor
1256 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1257 
1258     Level: advanced
1259 
1260 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1261           ISLocalToGlobalMappingGetInfo()
1262 @*/
1263 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1264 {
1265   PetscErrorCode ierr;
1266 
1267   PetscFunctionBegin;
1268   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1269   if (mapping->info_free) {
1270     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1271     if (*indices) {
1272       PetscInt i;
1273 
1274       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1275       for (i=1; i<*nproc; i++) {
1276         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1277       }
1278       ierr = PetscFree(*indices);CHKERRQ(ierr);
1279     }
1280   }
1281   *nproc = 0;
1282   *procs = NULL;
1283   *numprocs = NULL;
1284   *indices = NULL;
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #undef __FUNCT__
1289 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1290 /*@C
1291     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1292      each index shared by more than one processor
1293 
1294     Collective on ISLocalToGlobalMapping
1295 
1296     Input Parameters:
1297 .   mapping - the mapping from local to global indexing
1298 
1299     Output Parameter:
1300 +   nproc - number of processors that are connected to this one
1301 .   proc - neighboring processors
1302 .   numproc - number of indices for each subdomain (processor)
1303 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1304 
1305     Level: advanced
1306 
1307     Concepts: mapping^local to global
1308 
1309     Fortran Usage:
1310 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1311 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1312           PetscInt indices[nproc][numprocmax],ierr)
1313         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1314         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1315 
1316 
1317 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1318           ISLocalToGlobalMappingRestoreInfo()
1319 @*/
1320 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1321 {
1322   PetscErrorCode ierr;
1323   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1324 
1325   PetscFunctionBegin;
1326   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1327   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1328   if (bs > 1) { /* we need to expand the cached info */
1329     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1330     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1331     for (i=0; i<*nproc; i++) {
1332       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1333       for (j=0; j<bnumprocs[i]; j++) {
1334         for (k=0; k<bs; k++) {
1335           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1336         }
1337       }
1338       (*numprocs)[i] = bnumprocs[i]*bs;
1339     }
1340     mapping->info_free = PETSC_TRUE;
1341   } else {
1342     *numprocs = bnumprocs;
1343     *indices  = bindices;
1344   }
1345   PetscFunctionReturn(0);
1346 }
1347 
1348 #undef __FUNCT__
1349 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1350 /*@C
1351     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1352 
1353     Collective on ISLocalToGlobalMapping
1354 
1355     Input Parameters:
1356 .   mapping - the mapping from local to global indexing
1357 
1358     Output Parameter:
1359 +   nproc - number of processors that are connected to this one
1360 .   proc - neighboring processors
1361 .   numproc - number of indices for each processor
1362 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1363 
1364     Level: advanced
1365 
1366 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1367           ISLocalToGlobalMappingGetInfo()
1368 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* point info and block info share the same ownership rules, so the block
     restore routine handles both (it consults mapping->info_free) */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1377 
1378 #undef __FUNCT__
1379 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1380 /*@C
1381    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1382 
1383    Not Collective
1384 
1385    Input Arguments:
1386 . ltog - local to global mapping
1387 
1388    Output Arguments:
1389 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1390 
1391    Level: advanced
1392 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1394 
1395 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1396 @*/
1397 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1398 {
1399   PetscFunctionBegin;
1400   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1401   PetscValidPointer(array,2);
1402   if (ltog->bs == 1) {
1403     *array = ltog->indices;
1404   } else {
1405     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1406     const PetscInt *ii;
1407     PetscErrorCode ierr;
1408 
1409     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1410     *array = jj;
1411     k    = 0;
1412     ii   = ltog->indices;
1413     for (i=0; i<n; i++)
1414       for (j=0; j<bs; j++)
1415         jj[k++] = bs*ii[i] + j;
1416   }
1417   PetscFunctionReturn(0);
1418 }
1419 
1420 #undef __FUNCT__
1421 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1422 /*@C
1423    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1424 
1425    Not Collective
1426 
1427    Input Arguments:
1428 + ltog - local to global mapping
1429 - array - array of indices
1430 
1431    Level: advanced
1432 
1433 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1434 @*/
1435 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1436 {
1437   PetscFunctionBegin;
1438   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1439   PetscValidPointer(array,2);
1440   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1441 
1442   if (ltog->bs > 1) {
1443     PetscErrorCode ierr;
1444     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1445   }
1446   PetscFunctionReturn(0);
1447 }
1448 
1449 #undef __FUNCT__
1450 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1451 /*@C
1452    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1453 
1454    Not Collective
1455 
1456    Input Arguments:
1457 . ltog - local to global mapping
1458 
1459    Output Arguments:
1460 . array - array of indices
1461 
1462    Level: advanced
1463 
1464 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1465 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* borrowed reference to the mapping's internal block-index array: the caller
     must not free it and must return it with ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1474 
1475 #undef __FUNCT__
1476 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1477 /*@C
1478    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1479 
1480    Not Collective
1481 
1482    Input Arguments:
1483 + ltog - local to global mapping
1484 - array - array of indices
1485 
1486    Level: advanced
1487 
1488 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1489 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get routine hands out the internal array unmodified; anything else indicates misuse */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* nothing to free (borrowed reference); just invalidate the caller's pointer */
  *array = NULL;
  PetscFunctionReturn(0);
}
1499 
1500 #undef __FUNCT__
1501 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1502 /*@C
1503    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1504 
1505    Not Collective
1506 
1507    Input Arguments:
1508 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1509 . n - number of mappings to concatenate
1510 - ltogs - local to global mappings
1511 
1512    Output Arguments:
1513 . ltogcat - new mapping
1514 
1515    Note: this currently always returns a mapping with block size of 1
1516 
1517    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1518 
1519    Level: advanced
1520 
1521 .seealso: ISLocalToGlobalMappingCreate()
1522 @*/
1523 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1524 {
1525   PetscInt       i,cnt,m,*idx;
1526   PetscErrorCode ierr;
1527 
1528   PetscFunctionBegin;
1529   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1530   if (n > 0) PetscValidPointer(ltogs,3);
1531   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1532   PetscValidPointer(ltogcat,4);
1533   for (cnt=0,i=0; i<n; i++) {
1534     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1535     cnt += m;
1536   }
1537   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1538   for (cnt=0,i=0; i<n; i++) {
1539     const PetscInt *subidx;
1540     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1541     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1542     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1543     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1544     cnt += m;
1545   }
1546   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1547   PetscFunctionReturn(0);
1548 }
1549 
1550 
1551