xref: /petsc/src/vec/is/utils/isltog.c (revision 2c9ec6dfe874b911fa49ef7e759d29a8430d6aff)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
#undef __FUNCT__
#define __FUNCT__ "ISG2LMapApply"
/*
   ISG2LMapApply - Maps a list of global (block) indices to their local numbering,
   masking entries not present in the mapping with -1.

   Input Parameters:
+  mapping - the local to global mapping context
.  n       - number of indices to map
-  in      - global indices

   Output Parameter:
.  out - local index for each input, -1 if the index is outside the range
         owned/known here; negative inputs are passed through unchanged

   NOTE(review): the global-to-local table is built lazily by calling
   ISGlobalToLocalMappingApply() with n = 0, relying on that routine to run
   ISGlobalToLocalMappingSetUp_Private() as a side effect.
*/
PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
{
  PetscErrorCode ierr;
  PetscInt       i,start,end;

  PetscFunctionBegin;
  if (!mapping->globals) {
    /* zero-length apply triggers creation of mapping->globals */
    ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
  }
  start = mapping->globalstart;
  end = mapping->globalend;
  for (i=0; i<n; i++) {
    if (in[i] < 0)          out[i] = in[i];  /* negative indices pass through */
    else if (in[i] < start) out[i] = -1;     /* below the known global range */
    else if (in[i] > end)   out[i] = -1;     /* above the known global range */
    else                    out[i] = mapping->globals[in[i] - start];
  }
  PetscFunctionReturn(0);
}
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->bs*mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
142   /*  if (!isblock) { */
143     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
144     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
145     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
146     /*  } else {
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150      }*/
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   *bs = mapping->bs;
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
232 /*@
233     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
234     ordering and a global parallel ordering.
235 
236     Not Collective, but communicator may have more than one process
237 
238     Input Parameters:
239 +   comm - MPI communicator
240 .   bs - the block size
241 .   n - the number of local elements
242 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled by the blocksize bs
243 -   mode - see PetscCopyMode
244 
245     Output Parameter:
246 .   mapping - new mapping data structure
247 
248     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
249     Level: advanced
250 
251     Concepts: mapping^local to global
252 
253 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
254 @*/
255 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
256 {
257   PetscErrorCode ierr;
258   PetscInt       *in;
259 
260   PetscFunctionBegin;
261   if (n) PetscValidIntPointer(indices,3);
262   PetscValidPointer(mapping,4);
263 
264   *mapping = NULL;
265   ierr = ISInitializePackage();CHKERRQ(ierr);
266 
267   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
268                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
269   (*mapping)->n  = n;
270   (*mapping)->bs = bs;
271   /*
272     Do not create the global to local mapping. This is only created if
273     ISGlobalToLocalMapping() is called
274   */
275   (*mapping)->globals = 0;
276   if (mode == PETSC_COPY_VALUES) {
277     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
278     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
279     (*mapping)->indices = in;
280     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
281   } else if (mode == PETSC_OWN_POINTER) {
282     (*mapping)->indices = (PetscInt*)indices;
283     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
284   }
285   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
286   PetscFunctionReturn(0);
287 }
288 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* still referenced elsewhere: just drop this reference and zero the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
318 
319 #undef __FUNCT__
320 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
321 /*@
322     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
323     a new index set using the global numbering defined in an ISLocalToGlobalMapping
324     context.
325 
326     Not collective
327 
328     Input Parameters:
329 +   mapping - mapping between local and global numbering
330 -   is - index set in local numbering
331 
332     Output Parameters:
333 .   newis - index set in global numbering
334 
335     Level: advanced
336 
337     Concepts: mapping^local to global
338 
339 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
340           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
341 @*/
342 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
343 {
344   PetscErrorCode ierr;
345   PetscInt       n,*idxout;
346   const PetscInt *idxin;
347 
348   PetscFunctionBegin;
349   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
350   PetscValidHeaderSpecific(is,IS_CLASSID,2);
351   PetscValidPointer(newis,3);
352 
353   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
354   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
355   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
356   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
357   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
358   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
359   PetscFunctionReturn(0);
360 }
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "ISLocalToGlobalMappingApply"
364 /*@
365    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
366    and converts them to the global numbering.
367 
368    Not collective
369 
370    Input Parameters:
371 +  mapping - the local to global mapping context
372 .  N - number of integers
373 -  in - input indices in local numbering
374 
375    Output Parameter:
376 .  out - indices in global numbering
377 
378    Notes:
379    The in and out array parameters may be identical.
380 
381    Level: advanced
382 
383 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
384           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
385           AOPetscToApplication(), ISGlobalToLocalMappingApply()
386 
387     Concepts: mapping^local to global
388 @*/
389 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
390 {
391   PetscInt       i,bs = mapping->bs,Nmax = bs*mapping->n;
392   const PetscInt *idx = mapping->indices;
393 
394   PetscFunctionBegin;
395   if (bs == 1) {
396     for (i=0; i<N; i++) {
397       if (in[i] < 0) {
398         out[i] = in[i];
399         continue;
400       }
401       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
402       out[i] = idx[in[i]];
403     }
404   } else {
405     for (i=0; i<N; i++) {
406       if (in[i] < 0) {
407         out[i] = in[i];
408         continue;
409       }
410       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
411       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
412     }
413   }
414   PetscFunctionReturn(0);
415 }
416 
417 #undef __FUNCT__
418 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
419 /*@
420    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local numbering  and converts them to the global numbering.
421 
422    Not collective
423 
424    Input Parameters:
425 +  mapping - the local to global mapping context
426 .  N - number of integers
427 -  in - input indices in local numbering
428 
429    Output Parameter:
430 .  out - indices in global numbering
431 
432    Notes:
433    The in and out array parameters may be identical.
434 
435    Level: advanced
436 
437 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
438           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
439           AOPetscToApplication(), ISGlobalToLocalMappingApply()
440 
441     Concepts: mapping^local to global
442 @*/
443 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
444 {
445   PetscInt       i,Nmax = mapping->n;
446   const PetscInt *idx = mapping->indices;
447 
448   PetscFunctionBegin;
449   for (i=0; i<N; i++) {
450     if (in[i] < 0) {
451       out[i] = in[i];
452       continue;
453     }
454     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
455     out[i] = idx[in[i]];
456   }
457   PetscFunctionReturn(0);
458 }
459 
460 /* -----------------------------------------------------------------------------------------*/
461 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure

    Scans mapping->indices for the smallest and largest nonnegative global
    (block) indices, stores them in mapping->globalstart/globalend, and builds
    mapping->globals: a dense inverse table of length end-start+1 where
    globals[g - start] is the local position of global index g, or -1 if g does
    not appear in the mapping.  Memory use is O(globalend - globalstart).
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find min/max of the nonnegative entries */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;           /* negative entries are ignored */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}  /* no nonnegative entries: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* +2 guarantees a nonzero allocation even for the empty range (end-start+1 == 0) */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;  /* -1 marks "no local value" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;  /* for duplicate global indices the last occurrence wins */
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "ISGlobalToLocalMappingApply"
499 /*@
500     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
501     specified with a global numbering.
502 
503     Not collective
504 
505     Input Parameters:
506 +   mapping - mapping between local and global numbering
507 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
508            IS_GTOLM_DROP - drops the indices with no local value from the output list
509 .   n - number of global indices to map
510 -   idx - global indices to map
511 
512     Output Parameters:
513 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
514 -   idxout - local index of each global index, one must pass in an array long enough
515              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
516              idxout == NULL to determine the required length (returned in nout)
517              and then allocate the required space and call ISGlobalToLocalMappingApply()
518              a second time to set the values.
519 
520     Notes:
521     Either nout or idxout may be NULL. idx and idxout may be identical.
522 
523     This is not scalable in memory usage. Each processor requires O(Nglobal) size
524     array to compute these.
525 
526     Level: advanced
527 
528     Developer Note: The manual page states that idx and idxout may be identical but the calling
529        sequence declares idx as const so it cannot be the same as idxout.
530 
531     Concepts: mapping^global to local
532 
533 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
534           ISLocalToGlobalMappingDestroy()
535 @*/
536 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
537                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
538 {
539   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
540   PetscErrorCode ierr;
541 
542   PetscFunctionBegin;
543   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
544   if (!mapping->globals) {
545     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
546   }
547   globals = mapping->globals;
548   start   = mapping->globalstart;
549   end     = mapping->globalend;
550   bs      = mapping->bs;
551 
552   if (type == IS_GTOLM_MASK) {
553     if (idxout) {
554       for (i=0; i<n; i++) {
555         if (idx[i] < 0) idxout[i] = idx[i];
556         else if (idx[i] < bs*start) idxout[i] = -1;
557         else if (idx[i] > bs*end)   idxout[i] = -1;
558         else                        idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
559       }
560     }
561     if (nout) *nout = n;
562   } else {
563     if (idxout) {
564       for (i=0; i<n; i++) {
565         if (idx[i] < 0) continue;
566         if (idx[i] < bs*start) continue;
567         if (idx[i] > bs*end) continue;
568         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
569         if (tmp < 0) continue;
570         idxout[nf++] = tmp;
571       }
572     } else {
573       for (i=0; i<n; i++) {
574         if (idx[i] < 0) continue;
575         if (idx[i] < bs*start) continue;
576         if (idx[i] > bs*end) continue;
577         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
578         if (tmp < 0) continue;
579         nf++;
580       }
581     }
582     if (nout) *nout = nf;
583   }
584   PetscFunctionReturn(0);
585 }
586 
587 #undef __FUNCT__
588 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
589 /*@
590     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
591     specified with a block global numbering.
592 
593     Not collective
594 
595     Input Parameters:
596 +   mapping - mapping between local and global numbering
597 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
598            IS_GTOLM_DROP - drops the indices with no local value from the output list
599 .   n - number of global indices to map
600 -   idx - global indices to map
601 
602     Output Parameters:
603 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
604 -   idxout - local index of each global index, one must pass in an array long enough
605              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
606              idxout == NULL to determine the required length (returned in nout)
607              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
608              a second time to set the values.
609 
610     Notes:
611     Either nout or idxout may be NULL. idx and idxout may be identical.
612 
613     This is not scalable in memory usage. Each processor requires O(Nglobal) size
614     array to compute these.
615 
616     Level: advanced
617 
618     Developer Note: The manual page states that idx and idxout may be identical but the calling
619        sequence declares idx as const so it cannot be the same as idxout.
620 
621     Concepts: mapping^global to local
622 
623 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
624           ISLocalToGlobalMappingDestroy()
625 @*/
626 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
627                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
628 {
629   PetscInt       i,*globals,nf = 0,tmp,start,end;
630   PetscErrorCode ierr;
631 
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
634   if (!mapping->globals) {
635     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
636   }
637   globals = mapping->globals;
638   start   = mapping->globalstart;
639   end     = mapping->globalend;
640 
641   if (type == IS_GTOLM_MASK) {
642     if (idxout) {
643       for (i=0; i<n; i++) {
644         if (idx[i] < 0) idxout[i] = idx[i];
645         else if (idx[i] < start) idxout[i] = -1;
646         else if (idx[i] > end)   idxout[i] = -1;
647         else                     idxout[i] = globals[idx[i] - start];
648       }
649     }
650     if (nout) *nout = n;
651   } else {
652     if (idxout) {
653       for (i=0; i<n; i++) {
654         if (idx[i] < 0) continue;
655         if (idx[i] < start) continue;
656         if (idx[i] > end) continue;
657         tmp = globals[idx[i] - start];
658         if (tmp < 0) continue;
659         idxout[nf++] = tmp;
660       }
661     } else {
662       for (i=0; i<n; i++) {
663         if (idx[i] < 0) continue;
664         if (idx[i] < start) continue;
665         if (idx[i] > end) continue;
666         tmp = globals[idx[i] - start];
667         if (tmp < 0) continue;
668         nf++;
669       }
670     }
671     if (nout) *nout = nf;
672   }
673   PetscFunctionReturn(0);
674 }
675 
676 #undef __FUNCT__
677 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
678 /*@C
679     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
680      each index shared by more than one processor
681 
682     Collective on ISLocalToGlobalMapping
683 
684     Input Parameters:
685 .   mapping - the mapping from local to global indexing
686 
687     Output Parameter:
688 +   nproc - number of processors that are connected to this one
689 .   proc - neighboring processors
690 .   numproc - number of indices for each subdomain (processor)
691 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
692 
693     Level: advanced
694 
695     Concepts: mapping^local to global
696 
697     Fortran Usage:
698 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
699 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
700           PetscInt indices[nproc][numprocmax],ierr)
701         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
702         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
703 
704 
705 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
706           ISLocalToGlobalMappingRestoreInfo()
707 @*/
708 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
709 {
710   PetscErrorCode ierr;
711   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
712   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
713   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
714   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
715   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
716   PetscInt       first_procs,first_numprocs,*first_indices;
717   MPI_Request    *recv_waits,*send_waits;
718   MPI_Status     recv_status,*send_status,*recv_statuses;
719   MPI_Comm       comm;
720   PetscBool      debug = PETSC_FALSE;
721 
722   PetscFunctionBegin;
723   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
724   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
725   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
726   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
727   if (size == 1) {
728     *nproc         = 0;
729     *procs         = NULL;
730     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
731     (*numprocs)[0] = 0;
732     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
733     (*indices)[0]  = NULL;
734     PetscFunctionReturn(0);
735   }
736 
737   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
738 
739   /*
740     Notes on ISLocalToGlobalMappingGetBlockInfo
741 
742     globally owned node - the nodes that have been assigned to this processor in global
743            numbering, just for this routine.
744 
745     nontrivial globally owned node - node assigned to this processor that is on a subdomain
746            boundary (i.e. is has more than one local owner)
747 
748     locally owned node - node that exists on this processors subdomain
749 
750     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
751            local subdomain
752   */
753   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
754   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
755   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
756 
757   for (i=0; i<n; i++) {
758     if (lindices[i] > max) max = lindices[i];
759   }
760   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
761   Ng++;
762   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
763   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
764   scale  = Ng/size + 1;
765   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
766   rstart = scale*rank;
767 
768   /* determine ownership ranges of global indices */
769   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
770   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
771 
772   /* determine owners of each local node  */
773   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
774   for (i=0; i<n; i++) {
775     proc             = lindices[i]/scale; /* processor that globally owns this index */
776     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
777     owner[i]         = proc;
778     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
779   }
780   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
781   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
782 
783   /* inform other processors of number of messages and max length*/
784   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
785   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
786 
787   /* post receives for owned rows */
788   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
789   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
790   for (i=0; i<nrecvs; i++) {
791     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
792   }
793 
794   /* pack messages containing lists of local nodes to owners */
795   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
796   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
797   starts[0] = 0;
798   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
799   for (i=0; i<n; i++) {
800     sends[starts[owner[i]]++] = lindices[i];
801     sends[starts[owner[i]]++] = i;
802   }
803   ierr = PetscFree(owner);CHKERRQ(ierr);
804   starts[0] = 0;
805   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
806 
807   /* send the messages */
808   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
809   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
810   cnt = 0;
811   for (i=0; i<size; i++) {
812     if (nprocs[2*i]) {
813       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
814       dest[cnt] = i;
815       cnt++;
816     }
817   }
818   ierr = PetscFree(starts);CHKERRQ(ierr);
819 
820   /* wait on receives */
821   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
822   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
823   cnt  = nrecvs;
824   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
825   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
826   while (cnt) {
827     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
828     /* unpack receives into our local space */
829     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
830     source[imdex] = recv_status.MPI_SOURCE;
831     len[imdex]    = len[imdex]/2;
832     /* count how many local owners for each of my global owned indices */
833     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
834     cnt--;
835   }
836   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
837 
838   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
839   nowned  = 0;
840   nownedm = 0;
841   for (i=0; i<ng; i++) {
842     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
843   }
844 
845   /* create single array to contain rank of all local owners of each globally owned index */
846   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
847   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
848   starts[0] = 0;
849   for (i=1; i<ng; i++) {
850     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
851     else starts[i] = starts[i-1];
852   }
853 
854   /* for each nontrival globally owned node list all arriving processors */
855   for (i=0; i<nrecvs; i++) {
856     for (j=0; j<len[i]; j++) {
857       node = recvs[2*i*nmax+2*j]-rstart;
858       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
859     }
860   }
861 
862   if (debug) { /* -----------------------------------  */
863     starts[0] = 0;
864     for (i=1; i<ng; i++) {
865       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
866       else starts[i] = starts[i-1];
867     }
868     for (i=0; i<ng; i++) {
869       if (nownedsenders[i] > 1) {
870         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
871         for (j=0; j<nownedsenders[i]; j++) {
872           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
873         }
874         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
875       }
876     }
877     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
878   } /* -----------------------------------  */
879 
880   /* wait on original sends */
881   if (nsends) {
882     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
883     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
884     ierr = PetscFree(send_status);CHKERRQ(ierr);
885   }
886   ierr = PetscFree(send_waits);CHKERRQ(ierr);
887   ierr = PetscFree(sends);CHKERRQ(ierr);
888   ierr = PetscFree(nprocs);CHKERRQ(ierr);
889 
890   /* pack messages to send back to local owners */
891   starts[0] = 0;
892   for (i=1; i<ng; i++) {
893     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
894     else starts[i] = starts[i-1];
895   }
896   nsends2 = nrecvs;
897   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
898   for (i=0; i<nrecvs; i++) {
899     nprocs[i] = 1;
900     for (j=0; j<len[i]; j++) {
901       node = recvs[2*i*nmax+2*j]-rstart;
902       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
903     }
904   }
905   nt = 0;
906   for (i=0; i<nsends2; i++) nt += nprocs[i];
907 
908   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
909   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
910 
911   starts2[0] = 0;
912   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
913   /*
914      Each message is 1 + nprocs[i] long, and consists of
915        (0) the number of nodes being sent back
916        (1) the local node number,
917        (2) the number of processors sharing it,
918        (3) the processors sharing it
919   */
920   for (i=0; i<nsends2; i++) {
921     cnt = 1;
922     sends2[starts2[i]] = 0;
923     for (j=0; j<len[i]; j++) {
924       node = recvs[2*i*nmax+2*j]-rstart;
925       if (nownedsenders[node] > 1) {
926         sends2[starts2[i]]++;
927         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
928         sends2[starts2[i]+cnt++] = nownedsenders[node];
929         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
930         cnt += nownedsenders[node];
931       }
932     }
933   }
934 
935   /* receive the message lengths */
936   nrecvs2 = nsends;
937   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
938   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
939   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
940   for (i=0; i<nrecvs2; i++) {
941     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
942   }
943 
944   /* send the message lengths */
945   for (i=0; i<nsends2; i++) {
946     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
947   }
948 
949   /* wait on receives of lens */
950   if (nrecvs2) {
951     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
952     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
953     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
954   }
955   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
956 
957   starts3[0] = 0;
958   nt         = 0;
959   for (i=0; i<nrecvs2-1; i++) {
960     starts3[i+1] = starts3[i] + lens2[i];
961     nt          += lens2[i];
962   }
963   if (nrecvs2) nt += lens2[nrecvs2-1];
964 
965   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
966   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
967   for (i=0; i<nrecvs2; i++) {
968     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
969   }
970 
971   /* send the messages */
972   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
973   for (i=0; i<nsends2; i++) {
974     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
975   }
976 
977   /* wait on receives */
978   if (nrecvs2) {
979     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
980     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
981     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
982   }
983   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
984   ierr = PetscFree(nprocs);CHKERRQ(ierr);
985 
986   if (debug) { /* -----------------------------------  */
987     cnt = 0;
988     for (i=0; i<nrecvs2; i++) {
989       nt = recvs2[cnt++];
990       for (j=0; j<nt; j++) {
991         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
992         for (k=0; k<recvs2[cnt+1]; k++) {
993           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
994         }
995         cnt += 2 + recvs2[cnt+1];
996         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
997       }
998     }
999     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1000   } /* -----------------------------------  */
1001 
1002   /* count number subdomains for each local node */
1003   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1004   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1005   cnt  = 0;
1006   for (i=0; i<nrecvs2; i++) {
1007     nt = recvs2[cnt++];
1008     for (j=0; j<nt; j++) {
1009       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1010       cnt += 2 + recvs2[cnt+1];
1011     }
1012   }
1013   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1014   *nproc    = nt;
1015   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
1016   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
1017   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
1018   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1019   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1020   cnt       = 0;
1021   for (i=0; i<size; i++) {
1022     if (nprocs[i] > 0) {
1023       bprocs[i]        = cnt;
1024       (*procs)[cnt]    = i;
1025       (*numprocs)[cnt] = nprocs[i];
1026       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1027       cnt++;
1028     }
1029   }
1030 
1031   /* make the list of subdomains for each nontrivial local node */
1032   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1033   cnt  = 0;
1034   for (i=0; i<nrecvs2; i++) {
1035     nt = recvs2[cnt++];
1036     for (j=0; j<nt; j++) {
1037       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1038       cnt += 2 + recvs2[cnt+1];
1039     }
1040   }
1041   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1042   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1043 
1044   /* sort the node indexing by their global numbers */
1045   nt = *nproc;
1046   for (i=0; i<nt; i++) {
1047     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
1048     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1049     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1050     ierr = PetscFree(tmp);CHKERRQ(ierr);
1051   }
1052 
1053   if (debug) { /* -----------------------------------  */
1054     nt = *nproc;
1055     for (i=0; i<nt; i++) {
1056       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1057       for (j=0; j<(*numprocs)[i]; j++) {
1058         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1059       }
1060       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1061     }
1062     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1063   } /* -----------------------------------  */
1064 
1065   /* wait on sends */
1066   if (nsends2) {
1067     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1068     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1069     ierr = PetscFree(send_status);CHKERRQ(ierr);
1070   }
1071 
1072   ierr = PetscFree(starts3);CHKERRQ(ierr);
1073   ierr = PetscFree(dest);CHKERRQ(ierr);
1074   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1075 
1076   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1077   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1078   ierr = PetscFree(starts);CHKERRQ(ierr);
1079   ierr = PetscFree(starts2);CHKERRQ(ierr);
1080   ierr = PetscFree(lens2);CHKERRQ(ierr);
1081 
1082   ierr = PetscFree(source);CHKERRQ(ierr);
1083   ierr = PetscFree(len);CHKERRQ(ierr);
1084   ierr = PetscFree(recvs);CHKERRQ(ierr);
1085   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1086   ierr = PetscFree(sends2);CHKERRQ(ierr);
1087 
1088   /* put the information about myself as the first entry in the list */
1089   first_procs    = (*procs)[0];
1090   first_numprocs = (*numprocs)[0];
1091   first_indices  = (*indices)[0];
1092   for (i=0; i<*nproc; i++) {
1093     if ((*procs)[i] == rank) {
1094       (*procs)[0]    = (*procs)[i];
1095       (*numprocs)[0] = (*numprocs)[i];
1096       (*indices)[0]  = (*indices)[i];
1097       (*procs)[i]    = first_procs;
1098       (*numprocs)[i] = first_numprocs;
1099       (*indices)[i]  = first_indices;
1100       break;
1101     }
1102   }
1103   PetscFunctionReturn(0);
1104 }
1105 
1106 #undef __FUNCT__
1107 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1108 /*@C
1109     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1110 
1111     Collective on ISLocalToGlobalMapping
1112 
1113     Input Parameters:
1114 .   mapping - the mapping from local to global indexing
1115 
1116     Output Parameter:
1117 +   nproc - number of processors that are connected to this one
1118 .   proc - neighboring processors
1119 .   numproc - number of indices for each processor
1120 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1121 
1122     Level: advanced
1123 
1124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1125           ISLocalToGlobalMappingGetInfo()
1126 @*/
1127 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1128 {
1129   PetscErrorCode ierr;
1130   PetscInt       i;
1131 
1132   PetscFunctionBegin;
1133   ierr = PetscFree(*procs);CHKERRQ(ierr);
1134   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1135   if (*indices) {
1136     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1137     for (i=1; i<*nproc; i++) {
1138       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1139     }
1140     ierr = PetscFree(*indices);CHKERRQ(ierr);
1141   }
1142   PetscFunctionReturn(0);
1143 }
1144 
1145 #undef __FUNCT__
1146 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1147 /*@C
1148     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1149      each index shared by more than one processor
1150 
1151     Collective on ISLocalToGlobalMapping
1152 
1153     Input Parameters:
1154 .   mapping - the mapping from local to global indexing
1155 
1156     Output Parameter:
1157 +   nproc - number of processors that are connected to this one
1158 .   proc - neighboring processors
1159 .   numproc - number of indices for each subdomain (processor)
1160 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1161 
1162     Level: advanced
1163 
1164     Concepts: mapping^local to global
1165 
1166     Fortran Usage:
1167 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1168 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1169           PetscInt indices[nproc][numprocmax],ierr)
1170         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1171         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1172 
1173 
1174 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1175           ISLocalToGlobalMappingRestoreInfo()
1176 @*/
1177 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1178 {
1179   PetscErrorCode ierr;
1180   PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;
1181 
1182   PetscFunctionBegin;
1183   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1184   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
1185   ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1186   for (i=0; i<*nproc; i++) {
1187     ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
1188     for (j=0; j<(*numprocs)[i]; j++) {
1189       for (k=0; k<bs; k++) {
1190         (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1191       }
1192     }
1193     (*numprocs)[i] *= bs;
1194   }
1195   if (bindices) {
1196     ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
1197     for (i=1; i<*nproc; i++) {
1198       ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
1199     }
1200     ierr = PetscFree(bindices);CHKERRQ(ierr);
1201   }
1202   PetscFunctionReturn(0);
1203 }
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1207 /*@C
1208     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1209 
1210     Collective on ISLocalToGlobalMapping
1211 
1212     Input Parameters:
1213 .   mapping - the mapping from local to global indexing
1214 
1215     Output Parameter:
1216 +   nproc - number of processors that are connected to this one
1217 .   proc - neighboring processors
1218 .   numproc - number of indices for each processor
1219 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1220 
1221     Level: advanced
1222 
1223 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1224           ISLocalToGlobalMappingGetInfo()
1225 @*/
1226 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1227 {
1228   PetscErrorCode ierr;
1229 
1230   PetscFunctionBegin;
1231   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1232   PetscFunctionReturn(0);
1233 }
1234 
1235 #undef __FUNCT__
1236 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1237 /*@C
1238    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1239 
1240    Not Collective
1241 
1242    Input Arguments:
1243 . ltog - local to global mapping
1244 
1245    Output Arguments:
1246 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1247 
1248    Level: advanced
1249 
1250    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1251 
1252 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1253 @*/
1254 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1255 {
1256   PetscFunctionBegin;
1257   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1258   PetscValidPointer(array,2);
1259   if (ltog->bs == 1) {
1260     *array = ltog->indices;
1261   } else {
1262     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1263     const PetscInt *ii;
1264     PetscErrorCode ierr;
1265 
1266     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1267     *array = jj;
1268     k    = 0;
1269     ii   = ltog->indices;
1270     for (i=0; i<n; i++)
1271       for (j=0; j<bs; j++)
1272         jj[k++] = bs*ii[i] + j;
1273   }
1274   PetscFunctionReturn(0);
1275 }
1276 
1277 #undef __FUNCT__
1278 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1279 /*@C
1280    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1281 
1282    Not Collective
1283 
1284    Input Arguments:
1285 + ltog - local to global mapping
1286 - array - array of indices
1287 
1288    Level: advanced
1289 
1290 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1291 @*/
1292 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1293 {
1294   PetscFunctionBegin;
1295   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1296   PetscValidPointer(array,2);
1297   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1298 
1299   if (ltog->bs > 1) {
1300     PetscErrorCode ierr;
1301     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1302   }
1303   PetscFunctionReturn(0);
1304 }
1305 
1306 #undef __FUNCT__
1307 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1308 /*@C
1309    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1310 
1311    Not Collective
1312 
1313    Input Arguments:
1314 . ltog - local to global mapping
1315 
1316    Output Arguments:
1317 . array - array of indices
1318 
1319    Level: advanced
1320 
1321 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1322 @*/
1323 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1324 {
1325   PetscFunctionBegin;
1326   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1327   PetscValidPointer(array,2);
1328   *array = ltog->indices;
1329   PetscFunctionReturn(0);
1330 }
1331 
1332 #undef __FUNCT__
1333 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1334 /*@C
1335    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1336 
1337    Not Collective
1338 
1339    Input Arguments:
1340 + ltog - local to global mapping
1341 - array - array of indices
1342 
1343    Level: advanced
1344 
1345 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1346 @*/
1347 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1348 {
1349   PetscFunctionBegin;
1350   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1351   PetscValidPointer(array,2);
1352   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1353   *array = NULL;
1354   PetscFunctionReturn(0);
1355 }
1356 
1357 #undef __FUNCT__
1358 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1359 /*@C
1360    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1361 
1362    Not Collective
1363 
1364    Input Arguments:
1365 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1366 . n - number of mappings to concatenate
1367 - ltogs - local to global mappings
1368 
1369    Output Arguments:
1370 . ltogcat - new mapping
1371 
1372    Note: this currently always returns a mapping with block size of 1
1373 
1374    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1375 
1376    Level: advanced
1377 
1378 .seealso: ISLocalToGlobalMappingCreate()
1379 @*/
1380 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1381 {
1382   PetscInt       i,cnt,m,*idx;
1383   PetscErrorCode ierr;
1384 
1385   PetscFunctionBegin;
1386   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1387   if (n > 0) PetscValidPointer(ltogs,3);
1388   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1389   PetscValidPointer(ltogcat,4);
1390   for (cnt=0,i=0; i<n; i++) {
1391     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1392     cnt += m;
1393   }
1394   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1395   for (cnt=0,i=0; i<n; i++) {
1396     const PetscInt *subidx;
1397     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1398     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1399     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1400     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1401     cnt += m;
1402   }
1403   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1404   PetscFunctionReturn(0);
1405 }
1406 
1407 
1408