xref: /petsc/src/vec/is/utils/isltog.c (revision cd5a51df59a27f6d1f21820231b52f6dcc92535a)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
/*@
    ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   nltog - the duplicated local to global mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* Deep copy: the new mapping receives its own copy of the block indices
     (PETSC_COPY_VALUES).  Cached global-to-local data is not duplicated; it
     is rebuilt lazily on the new object if needed. */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
35 
/*@
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

    Not Collective

    Input Parameter:
.   mapping - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks; report the unblocked (scaled by bs) length */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
61 
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer; if NULL the standard-output viewer of the mapping's communicator is used

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to the communicator's stdout viewer when none is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing keeps per-rank output ordered; each line is
       "[rank] local-block-index global-block-index" */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  /* non-ASCII viewer types are silently ignored */
  PetscFunctionReturn(0);
}
104 
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local number

    Output Parameter:
.   mapping - new mapping data structure

    Notes: the block size of the IS determines the block size of the mapping
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  if (!isblock) {
    /* general IS: treat every entry as its own block (bs = 1) */
    ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  } else {
    /* block IS: ISGetLocalSize() returned the unblocked length, so the
       number of blocks passed to the mapping is n/bs */
    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
151 
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameter:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* local table must cover the largest leaf index; a NULL ilocal means the
     leaves are the contiguous range [0, nleaves) */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  /* roots own the contiguous global range [start, start+nroots) */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* entries not reached by the broadcast stay -1 (no global counterpart) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership transfers to the mapping (PETSC_OWN_POINTER); globals is temporary */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
198 
199 /*@
200     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
201 
202     Not collective
203 
204     Input Parameters:
205 .   mapping - mapping data structure
206 .   bs - the blocksize
207 
208     Level: advanced
209 
210     Concepts: mapping^local to global
211 
212 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
213 @*/
214 PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
215 {
216   PetscInt       *nid;
217   const PetscInt *oid;
218   PetscInt       i,cn,on,obs,nn;
219   PetscErrorCode ierr;
220 
221   PetscFunctionBegin;
222   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
223   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
224   if (bs == mapping->bs) PetscFunctionReturn(0);
225   on  = mapping->n;
226   obs = mapping->bs;
227   oid = mapping->indices;
228   nn  = (on*obs)/bs;
229   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
230 
231   ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
232   ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
233   for (i=0;i<nn;i++) {
234     PetscInt j;
235     for (j=0,cn=0;j<bs-1;j++) {
236       if (oid[i*bs+j] < 0) { cn++; continue; }
237       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
238     }
239     if (oid[i*bs+j] < 0) cn++;
240     if (cn) {
241       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
242       nid[i] = -1;
243     } else {
244       nid[i] = oid[i*bs]/bs;
245     }
246   }
247   ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);
248 
249   mapping->n           = nn;
250   mapping->bs          = bs;
251   ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
252   mapping->indices     = nid;
253   ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);
254   mapping->globalstart = 0;
255   mapping->globalend   = 0;
256   PetscFunctionReturn(0);
257 }
258 
259 /*@
260     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
261     ordering and a global parallel ordering.
262 
263     Not Collective
264 
265     Input Parameters:
266 .   mapping - mapping data structure
267 
268     Output Parameter:
269 .   bs - the blocksize
270 
271     Level: advanced
272 
273     Concepts: mapping^local to global
274 
275 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
276 @*/
277 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
278 {
279   PetscFunctionBegin;
280   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
281   *bs = mapping->bs;
282   PetscFunctionReturn(0);
283 }
284 
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  /* neighbor-info cache starts empty; it is filled by ISLocalToGlobalMappingGetBlockInfo() */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  (*mapping)->globalht = 0;
  (*mapping)->use_hash_table = PETSC_TRUE;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's block indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* the mapping takes ownership of the caller's array and will PetscFree() it */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
349 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  PetscHashIDestroy((*mapping)->globalht);
  /* info_indices is an array of per-processor index arrays; free each row,
     then the array of pointers itself */
  if ((*mapping)->info_indices) {
    PetscInt i;

    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
389 
390 /*@
391     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
392     a new index set using the global numbering defined in an ISLocalToGlobalMapping
393     context.
394 
395     Not collective
396 
397     Input Parameters:
398 +   mapping - mapping between local and global numbering
399 -   is - index set in local numbering
400 
401     Output Parameters:
402 .   newis - index set in global numbering
403 
404     Level: advanced
405 
406     Concepts: mapping^local to global
407 
408 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
409           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
410 @*/
411 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
412 {
413   PetscErrorCode ierr;
414   PetscInt       n,*idxout;
415   const PetscInt *idxin;
416 
417   PetscFunctionBegin;
418   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
419   PetscValidHeaderSpecific(is,IS_CLASSID,2);
420   PetscValidPointer(newis,3);
421 
422   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
423   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
424   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
425   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
426   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
427   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
428   PetscFunctionReturn(0);
429 }
430 
431 /*@
432    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
433    and converts them to the global numbering.
434 
435    Not collective
436 
437    Input Parameters:
438 +  mapping - the local to global mapping context
439 .  N - number of integers
440 -  in - input indices in local numbering
441 
442    Output Parameter:
443 .  out - indices in global numbering
444 
445    Notes:
446    The in and out array parameters may be identical.
447 
448    Level: advanced
449 
450 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
451           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
452           AOPetscToApplication(), ISGlobalToLocalMappingApply()
453 
454     Concepts: mapping^local to global
455 @*/
456 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
457 {
458   PetscInt i,bs,Nmax;
459 
460   PetscFunctionBegin;
461   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
462   bs   = mapping->bs;
463   Nmax = bs*mapping->n;
464   if (bs == 1) {
465     const PetscInt *idx = mapping->indices;
466     for (i=0; i<N; i++) {
467       if (in[i] < 0) {
468         out[i] = in[i];
469         continue;
470       }
471       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
472       out[i] = idx[in[i]];
473     }
474   } else {
475     const PetscInt *idx = mapping->indices;
476     for (i=0; i<N; i++) {
477       if (in[i] < 0) {
478         out[i] = in[i];
479         continue;
480       }
481       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
482       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
483     }
484   }
485   PetscFunctionReturn(0);
486 }
487 
488 /*@
489    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
490 
491    Not collective
492 
493    Input Parameters:
494 +  mapping - the local to global mapping context
495 .  N - number of integers
496 -  in - input indices in local block numbering
497 
498    Output Parameter:
499 .  out - indices in global block numbering
500 
501    Notes:
502    The in and out array parameters may be identical.
503 
504    Example:
505      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
506      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
507 
508    Level: advanced
509 
510 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
511           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
512           AOPetscToApplication(), ISGlobalToLocalMappingApply()
513 
514     Concepts: mapping^local to global
515 @*/
516 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
517 {
518 
519   PetscFunctionBegin;
520   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
521   {
522     PetscInt i,Nmax = mapping->n;
523     const PetscInt *idx = mapping->indices;
524 
525     for (i=0; i<N; i++) {
526       if (in[i] < 0) {
527         out[i] = in[i];
528         continue;
529       }
530       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
531       out[i] = idx[in[i]];
532     }
533   }
534   PetscFunctionReturn(0);
535 }
536 
537 /* -----------------------------------------------------------------------------------------*/
538 
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    computes globalstart/globalend as the min/max nonnegative block index
    and builds either a hash table (global -> local) or a dense lookup
    array of length globalend-globalstart+1, depending on use_hash_table.
    Idempotent: returns immediately if the lookup already exists.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  /* already set up? */
  if (mapping->use_hash_table && mapping->globalht) PetscFunctionReturn(0);
  else if (mapping->globals) PetscFunctionReturn(0);
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;   /* negative entries have no global counterpart */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices: normalize to an empty range */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  if (mapping->use_hash_table) {
    PetscHashICreate(mapping->globalht);
    for (i=0; i<n; i++ ) {
      if (idx[i] < 0) continue;
      PetscHashIAdd(mapping->globalht, idx[i], i);
    }
    ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
  } else {
    /* dense table over [start,end]; entries stay -1 when the global index
       has no local counterpart.  NOTE(review): end-start+2 allocates one
       extra slot beyond the range — presumably a guard; confirm intent. */
    ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
    mapping->globals = globals;
    for (i=0; i<end-start+1; i++) globals[i] = -1;
    for (i=0; i<n; i++) {
      if (idx[i] < 0) continue;
      globals[idx[i] - start] = i;
    }

    ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
582 
583 /*@
584     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
585     specified with a global numbering.
586 
587     Not collective
588 
589     Input Parameters:
590 +   mapping - mapping between local and global numbering
591 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
592            IS_GTOLM_DROP - drops the indices with no local value from the output list
593 .   n - number of global indices to map
594 -   idx - global indices to map
595 
596     Output Parameters:
597 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
598 -   idxout - local index of each global index, one must pass in an array long enough
599              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
600              idxout == NULL to determine the required length (returned in nout)
601              and then allocate the required space and call ISGlobalToLocalMappingApply()
602              a second time to set the values.
603 
604     Notes:
605     Either nout or idxout may be NULL. idx and idxout may be identical.
606 
607     This is not scalable in memory usage. Each processor requires O(Nglobal) size
608     array to compute these.
609 
610     Level: advanced
611 
612     Developer Note: The manual page states that idx and idxout may be identical but the calling
613        sequence declares idx as const so it cannot be the same as idxout.
614 
615     Concepts: mapping^global to local
616 
617 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
618           ISLocalToGlobalMappingDestroy()
619 @*/
620 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
621                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
622 {
623   PetscInt       i,*globals,nf = 0,tmp,start,end,bs,local;
624   const PetscBool use_hash_table = mapping->use_hash_table;
625   PetscErrorCode ierr;
626 
627   PetscFunctionBegin;
628   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
629   ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
630   globals = mapping->globals;
631   start   = mapping->globalstart;
632   end     = mapping->globalend;
633   bs      = mapping->bs;
634 
635 #define GTOL(g, local) do {                        \
636     if (use_hash_table) {                          \
637       PetscHashIMap(mapping->globalht,g/bs,local); \
638     } else {                                       \
639       local = globals[g/bs - start];               \
640     }                                              \
641     local = bs*local + (g % bs);                   \
642   } while (0)
643 
644   if (type == IS_GTOLM_MASK) {
645     if (idxout) {
646       for (i=0; i<n; i++) {
647         if (idx[i] < 0)                   idxout[i] = idx[i];
648         else if (idx[i] < bs*start)       idxout[i] = -1;
649         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
650         else                              GTOL(idx[i], local);
651       }
652     }
653     if (nout) *nout = n;
654   } else {
655     if (idxout) {
656       for (i=0; i<n; i++) {
657         if (idx[i] < 0) continue;
658         if (idx[i] < bs*start) continue;
659         if (idx[i] > bs*(end+1)-1) continue;
660         GTOL(idx[i], tmp);
661         if (tmp < 0) continue;
662         idxout[nf++] = tmp;
663       }
664     } else {
665       for (i=0; i<n; i++) {
666         if (idx[i] < 0) continue;
667         if (idx[i] < bs*start) continue;
668         if (idx[i] > bs*(end+1)-1) continue;
669         GTOL(idx[i], tmp);
670         if (tmp < 0) continue;
671         nf++;
672       }
673     }
674     if (nout) *nout = nf;
675   }
676 #undef GTOL
677   PetscFunctionReturn(0);
678 }
679 
/*@
    ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
    a new index set using the local numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK or IS_GTOLM_DROP, see ISGlobalToLocalMappingApply()
-   is - index set in global numbering

    Output Parameters:
.   newis - index set in local numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* MASK mode keeps one output entry per input entry, so n suffices */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* DROP mode: first pass with idxout == NULL counts the surviving entries */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* the new IS takes ownership of idxout */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
725 
/*@
    ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
    specified with a block global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  const PetscBool use_hash_table = mapping->use_hash_table;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the global-to-local lookup (hash table or dense array) */
  ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

/* look up the local block index of global block index g */
#define GTOL(g, local) do {                     \
    if (use_hash_table) {                       \
      PetscHashIMap(mapping->globalht,g,local); \
    } else {                                    \
      local = globals[g - start];               \
    }                                           \
  } while (0)

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     GTOL(idx[i], idxout[i]);
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: only indices with a local counterpart are kept */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        GTOL(idx[i], tmp);
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting pass (idxout == NULL): report required length in nout */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        GTOL(idx[i], tmp);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
#undef GTOL
  PetscFunctionReturn(0);
}
820 
/*@C
    ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_cached) {
    /* hand back the cached arrays; they remain owned by the mapping */
    *nproc = mapping->info_nproc;
    *procs = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices = mapping->info_indices;
  } else {
    /* compute the neighbor information (involves communication) */
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
867 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - builds the neighbor information for the
   mapping: which other MPI ranks share (block) indices with this rank, and which local
   indices those are.

   Communication scheme (three rounds of point-to-point messages):
     tag1: each rank sends its (global index, local position) pairs to the rank that
           "globally owns" that index under a uniform partition of [0,Ng)
     tag2: the owners send back the length of the reply each sender should expect
     tag3: the owners send back, for every shared index, the list of ranks sharing it

   On return *procs/*numprocs/*indices describe, per neighboring rank, the local indices
   shared with that rank; the results are also cached on the mapping (info_*) so a later
   ISLocalToGlobalMappingGetBlockInfo() can answer without communication.
   NOTE(review): the cached arrays are owned by the mapping -- presumably released by its
   destroy routine; confirm in ISLocalToGlobalMappingDestroy().
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* sequential: there are no neighbors; return empty (but allocated) arrays */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + the largest global index appearing on any rank */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* uniform partition: rank r globally owns indices [r*scale, r*scale+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  /* each message entry is the pair (global index, local position) */
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* rebuild starts[] since the fill loop above advanced it */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[r] = compressed position of rank r in the *procs list */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1277 
1278 /*@C
1279     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1280 
1281     Collective on ISLocalToGlobalMapping
1282 
1283     Input Parameters:
1284 .   mapping - the mapping from local to global indexing
1285 
    Output Parameters:
1287 +   nproc - number of processors that are connected to this one
1288 .   proc - neighboring processors
1289 .   numproc - number of indices for each processor
1290 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1291 
1292     Level: advanced
1293 
1294 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1295           ISLocalToGlobalMappingGetInfo()
1296 @*/
1297 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1298 {
1299   PetscErrorCode ierr;
1300 
1301   PetscFunctionBegin;
1302   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1303   if (mapping->info_free) {
1304     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1305     if (*indices) {
1306       PetscInt i;
1307 
1308       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1309       for (i=1; i<*nproc; i++) {
1310         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1311       }
1312       ierr = PetscFree(*indices);CHKERRQ(ierr);
1313     }
1314   }
1315   *nproc = 0;
1316   *procs = NULL;
1317   *numprocs = NULL;
1318   *indices = NULL;
1319   PetscFunctionReturn(0);
1320 }
1321 
1322 /*@C
1323     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1324      each index shared by more than one processor
1325 
1326     Collective on ISLocalToGlobalMapping
1327 
1328     Input Parameters:
1329 .   mapping - the mapping from local to global indexing
1330 
    Output Parameters:
1332 +   nproc - number of processors that are connected to this one
1333 .   proc - neighboring processors
1334 .   numproc - number of indices for each subdomain (processor)
1335 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1336 
1337     Level: advanced
1338 
1339     Concepts: mapping^local to global
1340 
1341     Fortran Usage:
1342 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1343 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1344           PetscInt indices[nproc][numprocmax],ierr)
1345         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1346         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1347 
1348 
1349 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1350           ISLocalToGlobalMappingRestoreInfo()
1351 @*/
1352 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1353 {
1354   PetscErrorCode ierr;
1355   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1356 
1357   PetscFunctionBegin;
1358   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1359   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1360   if (bs > 1) { /* we need to expand the cached info */
1361     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1362     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1363     for (i=0; i<*nproc; i++) {
1364       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1365       for (j=0; j<bnumprocs[i]; j++) {
1366         for (k=0; k<bs; k++) {
1367           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1368         }
1369       }
1370       (*numprocs)[i] = bnumprocs[i]*bs;
1371     }
1372     mapping->info_free = PETSC_TRUE;
1373   } else {
1374     *numprocs = bnumprocs;
1375     *indices  = bindices;
1376   }
1377   PetscFunctionReturn(0);
1378 }
1379 
1380 /*@C
1381     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1382 
1383     Collective on ISLocalToGlobalMapping
1384 
1385     Input Parameters:
1386 .   mapping - the mapping from local to global indexing
1387 
    Output Parameters:
1389 +   nproc - number of processors that are connected to this one
1390 .   proc - neighboring processors
1391 .   numproc - number of indices for each processor
1392 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1393 
1394     Level: advanced
1395 
1396 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1397           ISLocalToGlobalMappingGetInfo()
1398 @*/
/* Thin wrapper: the expanded (bs > 1) info and the block info use identical restore
   logic, so this simply defers to ISLocalToGlobalMappingRestoreBlockInfo(). */
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1407 
1408 /*@C
1409    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1410 
1411    Not Collective
1412 
1413    Input Arguments:
1414 . ltog - local to global mapping
1415 
1416    Output Arguments:
1417 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1418 
1419    Level: advanced
1420 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1422 
1423 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1424 @*/
1425 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1426 {
1427   PetscFunctionBegin;
1428   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1429   PetscValidPointer(array,2);
1430   if (ltog->bs == 1) {
1431     *array = ltog->indices;
1432   } else {
1433     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1434     const PetscInt *ii;
1435     PetscErrorCode ierr;
1436 
1437     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1438     *array = jj;
1439     k    = 0;
1440     ii   = ltog->indices;
1441     for (i=0; i<n; i++)
1442       for (j=0; j<bs; j++)
1443         jj[k++] = bs*ii[i] + j;
1444   }
1445   PetscFunctionReturn(0);
1446 }
1447 
1448 /*@C
1449    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1450 
1451    Not Collective
1452 
1453    Input Arguments:
1454 + ltog - local to global mapping
1455 - array - array of indices
1456 
1457    Level: advanced
1458 
1459 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1460 @*/
1461 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1462 {
1463   PetscFunctionBegin;
1464   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1465   PetscValidPointer(array,2);
1466   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1467 
1468   if (ltog->bs > 1) {
1469     PetscErrorCode ierr;
1470     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1471   }
1472   PetscFunctionReturn(0);
1473 }
1474 
1475 /*@C
1476    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1477 
1478    Not Collective
1479 
1480    Input Arguments:
1481 . ltog - local to global mapping
1482 
1483    Output Arguments:
1484 . array - array of indices
1485 
1486    Level: advanced
1487 
1488 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1489 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out the internal block-index array directly; no copy is made, so the caller
     must return the identical pointer via ISLocalToGlobalMappingRestoreBlockIndices()
     and must not modify or free it */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1498 
1499 /*@C
1500    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1501 
1502    Not Collective
1503 
1504    Input Arguments:
1505 + ltog - local to global mapping
1506 - array - array of indices
1507 
1508    Level: advanced
1509 
1510 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1511 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* GetBlockIndices() handed out the internal array without copying, so the exact
     same pointer must be returned */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* NULL the caller's handle to guard against use after restore */
  *array = NULL;
  PetscFunctionReturn(0);
}
1521 
1522 /*@C
1523    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1524 
1525    Not Collective
1526 
1527    Input Arguments:
1528 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1529 . n - number of mappings to concatenate
1530 - ltogs - local to global mappings
1531 
1532    Output Arguments:
1533 . ltogcat - new mapping
1534 
1535    Note: this currently always returns a mapping with block size of 1
1536 
1537    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1538 
1539    Level: advanced
1540 
1541 .seealso: ISLocalToGlobalMappingCreate()
1542 @*/
1543 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1544 {
1545   PetscInt       i,cnt,m,*idx;
1546   PetscErrorCode ierr;
1547 
1548   PetscFunctionBegin;
1549   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1550   if (n > 0) PetscValidPointer(ltogs,3);
1551   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1552   PetscValidPointer(ltogcat,4);
1553   for (cnt=0,i=0; i<n; i++) {
1554     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1555     cnt += m;
1556   }
1557   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1558   for (cnt=0,i=0; i<n; i++) {
1559     const PetscInt *subidx;
1560     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1561     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1562     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1563     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1564     cnt += m;
1565   }
1566   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1567   PetscFunctionReturn(0);
1568 }
1569 
1570 
1571