xref: /petsc/src/vec/is/utils/isltog.c (revision d4fe737eb3e7e3e6f664356c79de84ba6cc7e555)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
PetscClassId IS_LTOGM_CLASSID;  /* class id for ISLocalToGlobalMapping objects; used by PetscHeaderCreate() in ISLocalToGlobalMappingCreate() and the validation macros below */
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* the mapping stores one index per block, so the local (unblocked) size is blocksize times number of blocks */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
59 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   mapping - local to global mapping
-   viewer - viewer; if NULL the default ASCII viewer for the mapping's communicator is used

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* default to stdout on the mapping's communicator */
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    /* prints the mapping->n stored (block) indices, one "[rank] local global" line each */
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
104 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local number

    Output Parameter:
.   mapping - new mapping data structure

    Notes: the mapping is currently always created with block size 1, regardless of
    the block size of the IS; the block-aware code path below is disabled
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  const PetscInt *indices;
  MPI_Comm       comm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  /* bs and isblock are queried but currently unused: the block path below is commented out */
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
  /*  if (!isblock) { */
    ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
    /*  } else {
    ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreate(comm,bs,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
    ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
     }*/
  PetscFunctionReturn(0);
}
153 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* local space size = largest leaf index + 1; ilocal == NULL means leaves are contiguous 0..nleaves-1 */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  for (i=0; i<nroots; i++) globals[i] = start + i;  /* this rank's roots own the contiguous range [start,start+nroots) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;          /* local entries not covered by any leaf stay -1 */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership passes to the mapping (PETSC_OWN_POINTER); only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   *bs = mapping->bs;
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
232 /*@
233     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
234     ordering and a global parallel ordering.
235 
236     Not Collective, but communicator may have more than one process
237 
238     Input Parameters:
239 +   comm - MPI communicator
240 .   bs - the block size
241 .   n - the number of local elements
242 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled by the blocksize bs
243 -   mode - see PetscCopyMode
244 
245     Output Parameter:
246 .   mapping - new mapping data structure
247 
248     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
249     Level: advanced
250 
251     Concepts: mapping^local to global
252 
253 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
254 @*/
255 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
256 {
257   PetscErrorCode ierr;
258   PetscInt       *in;
259 
260   PetscFunctionBegin;
261   if (n) PetscValidIntPointer(indices,3);
262   PetscValidPointer(mapping,4);
263 
264   *mapping = NULL;
265   ierr = ISInitializePackage();CHKERRQ(ierr);
266 
267   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
268                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
269   (*mapping)->n  = n;
270   (*mapping)->bs = bs;
271   /*
272     Do not create the global to local mapping. This is only created if
273     ISGlobalToLocalMapping() is called
274   */
275   (*mapping)->globals = 0;
276   if (mode == PETSC_COPY_VALUES) {
277     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
278     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
279     (*mapping)->indices = in;
280     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
281   } else if (mode == PETSC_OWN_POINTER) {
282     (*mapping)->indices = (PetscInt*)indices;
283     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
284   }
285   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
286   PetscFunctionReturn(0);
287 }
288 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* if other references remain just drop this one and zero the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
318 
319 #undef __FUNCT__
320 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
321 /*@
322     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
323     a new index set using the global numbering defined in an ISLocalToGlobalMapping
324     context.
325 
326     Not collective
327 
328     Input Parameters:
329 +   mapping - mapping between local and global numbering
330 -   is - index set in local numbering
331 
332     Output Parameters:
333 .   newis - index set in global numbering
334 
335     Level: advanced
336 
337     Concepts: mapping^local to global
338 
339 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
340           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
341 @*/
342 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
343 {
344   PetscErrorCode ierr;
345   PetscInt       n,*idxout;
346   const PetscInt *idxin;
347 
348   PetscFunctionBegin;
349   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
350   PetscValidHeaderSpecific(is,IS_CLASSID,2);
351   PetscValidPointer(newis,3);
352 
353   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
354   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
355   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
356   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
357   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
358   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
359   PetscFunctionReturn(0);
360 }
361 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingApply"
/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Negative input indices are passed through unchanged; an input index at or
   above bs*n (the local size) generates an error.

   Level: advanced

.seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

    Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt       i,bs = mapping->bs,Nmax = bs*mapping->n;
  const PetscInt *idx = mapping->indices;

  PetscFunctionBegin;
  if (bs == 1) {
    /* one stored index per entry: direct table lookup */
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];  /* negative indices pass through unchanged */
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    /* idx[] holds one block index per block of bs entries: map the block, then add the offset within the block */
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}
416 
417 #undef __FUNCT__
418 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
419 /*@
420    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local numbering  and converts them to the global numbering.
421 
422    Not collective
423 
424    Input Parameters:
425 +  mapping - the local to global mapping context
426 .  N - number of integers
427 -  in - input indices in local numbering
428 
429    Output Parameter:
430 .  out - indices in global numbering
431 
432    Notes:
433    The in and out array parameters may be identical.
434 
435    Level: advanced
436 
437 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
438           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
439           AOPetscToApplication(), ISGlobalToLocalMappingApply()
440 
441     Concepts: mapping^local to global
442 @*/
443 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
444 {
445   PetscInt       i,Nmax = mapping->n;
446   const PetscInt *idx = mapping->indices;
447 
448   PetscFunctionBegin;
449   for (i=0; i<N; i++) {
450     if (in[i] < 0) {
451       out[i] = in[i];
452       continue;
453     }
454     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
455     out[i] = idx[in[i]];
456   }
457   PetscFunctionReturn(0);
458 }
459 
460 /* -----------------------------------------------------------------------------------------*/
461 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure: a dense
    lookup table globals[] covering [globalstart,globalend] such that
    globals[g-globalstart] is the local index of global index g, or -1 if g is
    not referenced by this mapping.

    Not scalable: memory is proportional to the span of the referenced global
    indices, not to the number of local entries.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min/max nonnegative global index referenced */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;  /* negative entries are ignored */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}  /* no nonnegative entries: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* NOTE(review): allocates end-start+2 but only end-start+1 entries are used; presumably the
     extra slot guarantees a nonzero-size allocation for an empty range -- TODO confirm */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;  /* -1 marks "no local index for this global" */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;  /* if a global index appears more than once, the last local index wins */
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
496 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                            PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;  /* owned range is [start,end] in BLOCK indices; inputs here are unblocked */
  end     = mapping->globalend;
  bs      = mapping->bs;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];          /* negative indices pass through */
        else if (idx[i] < bs*start) idxout[i] = -1;  /* outside owned range: masked with -1 */
        else if (idx[i] > bs*end)   idxout[i] = -1;
        /* NOTE(review): if globals[...] is -1 (in-range but unowned) this yields bs*(-1)+offset,
           which is negative but not -1 when bs > 1 -- confirm intended mask value */
        else                        idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
      }
    }
    if (nout) *nout = n;  /* MASK output always has the same length as the input */
  } else {
    /* IS_GTOLM_DROP: indices with no local value are omitted from the output */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*end) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;  /* globals[] entry was -1: in range but not locally owned */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == NULL), used by callers to size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*end) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
586 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
/*@
    ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
    a new index set using the local numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
-   is - index set in global numbering

    Output Parameters:
.   newis - index set in local numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,nout,*idxout;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  PetscValidPointer(newis,4);

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  if (type == IS_GTOLM_MASK) {
    /* MASK output has exactly n entries, so no counting pass is needed */
    ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
  } else {
    /* DROP: first call with idxout == NULL only counts the surviving indices */
    ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
    ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
  }
  ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  /* idxout ownership transfers to newis (PETSC_OWN_POINTER) */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
634 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
/*@
    ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
    specified with a block global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;  /* inputs and table are both in block indices here, so no bs scaling */
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];       /* negative indices pass through */
        else if (idx[i] < start) idxout[i] = -1;  /* outside owned range: masked with -1 */
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];  /* may itself be -1 for unowned */
      }
    }
    if (nout) *nout = n;  /* MASK output always has the same length as the input */
  } else {
    /* IS_GTOLM_DROP: indices with no local value are omitted from the output */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;  /* table entry -1: in range but not locally owned */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == NULL), used by callers to size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
723 
724 #undef __FUNCT__
725 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
726 /*@C
727     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
728      each index shared by more than one processor
729 
730     Collective on ISLocalToGlobalMapping
731 
732     Input Parameters:
733 .   mapping - the mapping from local to global indexing
734 
735     Output Parameter:
736 +   nproc - number of processors that are connected to this one
737 .   proc - neighboring processors
738 .   numproc - number of indices for each subdomain (processor)
739 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
740 
741     Level: advanced
742 
743     Concepts: mapping^local to global
744 
745     Fortran Usage:
746 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
747 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
748           PetscInt indices[nproc][numprocmax],ierr)
749         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
750         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
751 
752 
753 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
754           ISLocalToGlobalMappingRestoreInfo()
755 @*/
756 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
757 {
758   PetscErrorCode ierr;
759   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
760   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
761   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
762   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
763   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
764   PetscInt       first_procs,first_numprocs,*first_indices;
765   MPI_Request    *recv_waits,*send_waits;
766   MPI_Status     recv_status,*send_status,*recv_statuses;
767   MPI_Comm       comm;
768   PetscBool      debug = PETSC_FALSE;
769 
770   PetscFunctionBegin;
771   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
772   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
773   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
774   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
775   if (size == 1) {
776     *nproc         = 0;
777     *procs         = NULL;
778     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
779     (*numprocs)[0] = 0;
780     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
781     (*indices)[0]  = NULL;
782     PetscFunctionReturn(0);
783   }
784 
785   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
786 
787   /*
788     Notes on ISLocalToGlobalMappingGetBlockInfo
789 
790     globally owned node - the nodes that have been assigned to this processor in global
791            numbering, just for this routine.
792 
793     nontrivial globally owned node - node assigned to this processor that is on a subdomain
794            boundary (i.e. is has more than one local owner)
795 
796     locally owned node - node that exists on this processors subdomain
797 
798     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
799            local subdomain
800   */
801   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
802   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
803   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
804 
805   for (i=0; i<n; i++) {
806     if (lindices[i] > max) max = lindices[i];
807   }
808   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
809   Ng++;
810   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
811   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
812   scale  = Ng/size + 1;
813   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
814   rstart = scale*rank;
815 
816   /* determine ownership ranges of global indices */
817   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
818   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
819 
820   /* determine owners of each local node  */
821   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
822   for (i=0; i<n; i++) {
823     proc             = lindices[i]/scale; /* processor that globally owns this index */
824     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
825     owner[i]         = proc;
826     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
827   }
828   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
829   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
830 
831   /* inform other processors of number of messages and max length*/
832   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
833   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
834 
835   /* post receives for owned rows */
836   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
837   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
838   for (i=0; i<nrecvs; i++) {
839     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
840   }
841 
842   /* pack messages containing lists of local nodes to owners */
843   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
844   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
845   starts[0] = 0;
846   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
847   for (i=0; i<n; i++) {
848     sends[starts[owner[i]]++] = lindices[i];
849     sends[starts[owner[i]]++] = i;
850   }
851   ierr = PetscFree(owner);CHKERRQ(ierr);
852   starts[0] = 0;
853   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
854 
855   /* send the messages */
856   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
857   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
858   cnt = 0;
859   for (i=0; i<size; i++) {
860     if (nprocs[2*i]) {
861       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
862       dest[cnt] = i;
863       cnt++;
864     }
865   }
866   ierr = PetscFree(starts);CHKERRQ(ierr);
867 
868   /* wait on receives */
869   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
870   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
871   cnt  = nrecvs;
872   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
873   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
874   while (cnt) {
875     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
876     /* unpack receives into our local space */
877     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
878     source[imdex] = recv_status.MPI_SOURCE;
879     len[imdex]    = len[imdex]/2;
880     /* count how many local owners for each of my global owned indices */
881     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
882     cnt--;
883   }
884   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
885 
886   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
887   nowned  = 0;
888   nownedm = 0;
889   for (i=0; i<ng; i++) {
890     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
891   }
892 
893   /* create single array to contain rank of all local owners of each globally owned index */
894   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
895   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
896   starts[0] = 0;
897   for (i=1; i<ng; i++) {
898     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
899     else starts[i] = starts[i-1];
900   }
901 
  /* for each nontrivial globally owned node, list all arriving processors */
903   for (i=0; i<nrecvs; i++) {
904     for (j=0; j<len[i]; j++) {
905       node = recvs[2*i*nmax+2*j]-rstart;
906       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
907     }
908   }
909 
910   if (debug) { /* -----------------------------------  */
911     starts[0] = 0;
912     for (i=1; i<ng; i++) {
913       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
914       else starts[i] = starts[i-1];
915     }
916     for (i=0; i<ng; i++) {
917       if (nownedsenders[i] > 1) {
918         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
919         for (j=0; j<nownedsenders[i]; j++) {
920           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
921         }
922         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
923       }
924     }
925     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
926   } /* -----------------------------------  */
927 
928   /* wait on original sends */
929   if (nsends) {
930     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
931     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
932     ierr = PetscFree(send_status);CHKERRQ(ierr);
933   }
934   ierr = PetscFree(send_waits);CHKERRQ(ierr);
935   ierr = PetscFree(sends);CHKERRQ(ierr);
936   ierr = PetscFree(nprocs);CHKERRQ(ierr);
937 
938   /* pack messages to send back to local owners */
939   starts[0] = 0;
940   for (i=1; i<ng; i++) {
941     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
942     else starts[i] = starts[i-1];
943   }
944   nsends2 = nrecvs;
945   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
946   for (i=0; i<nrecvs; i++) {
947     nprocs[i] = 1;
948     for (j=0; j<len[i]; j++) {
949       node = recvs[2*i*nmax+2*j]-rstart;
950       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
951     }
952   }
953   nt = 0;
954   for (i=0; i<nsends2; i++) nt += nprocs[i];
955 
956   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
957   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
958 
959   starts2[0] = 0;
960   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
961   /*
962      Each message is 1 + nprocs[i] long, and consists of
963        (0) the number of nodes being sent back
964        (1) the local node number,
965        (2) the number of processors sharing it,
966        (3) the processors sharing it
967   */
968   for (i=0; i<nsends2; i++) {
969     cnt = 1;
970     sends2[starts2[i]] = 0;
971     for (j=0; j<len[i]; j++) {
972       node = recvs[2*i*nmax+2*j]-rstart;
973       if (nownedsenders[node] > 1) {
974         sends2[starts2[i]]++;
975         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
976         sends2[starts2[i]+cnt++] = nownedsenders[node];
977         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
978         cnt += nownedsenders[node];
979       }
980     }
981   }
982 
983   /* receive the message lengths */
984   nrecvs2 = nsends;
985   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
986   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
987   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
988   for (i=0; i<nrecvs2; i++) {
989     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
990   }
991 
992   /* send the message lengths */
993   for (i=0; i<nsends2; i++) {
994     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
995   }
996 
997   /* wait on receives of lens */
998   if (nrecvs2) {
999     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
1000     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
1001     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
1002   }
1003   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1004 
1005   starts3[0] = 0;
1006   nt         = 0;
1007   for (i=0; i<nrecvs2-1; i++) {
1008     starts3[i+1] = starts3[i] + lens2[i];
1009     nt          += lens2[i];
1010   }
1011   if (nrecvs2) nt += lens2[nrecvs2-1];
1012 
1013   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
1014   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
1015   for (i=0; i<nrecvs2; i++) {
1016     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
1017   }
1018 
1019   /* send the messages */
1020   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
1021   for (i=0; i<nsends2; i++) {
1022     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
1023   }
1024 
1025   /* wait on receives */
1026   if (nrecvs2) {
1027     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
1028     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
1029     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
1030   }
1031   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1032   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1033 
1034   if (debug) { /* -----------------------------------  */
1035     cnt = 0;
1036     for (i=0; i<nrecvs2; i++) {
1037       nt = recvs2[cnt++];
1038       for (j=0; j<nt; j++) {
1039         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
1040         for (k=0; k<recvs2[cnt+1]; k++) {
1041           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
1042         }
1043         cnt += 2 + recvs2[cnt+1];
1044         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1045       }
1046     }
1047     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1048   } /* -----------------------------------  */
1049 
1050   /* count number subdomains for each local node */
1051   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1052   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1053   cnt  = 0;
1054   for (i=0; i<nrecvs2; i++) {
1055     nt = recvs2[cnt++];
1056     for (j=0; j<nt; j++) {
1057       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1058       cnt += 2 + recvs2[cnt+1];
1059     }
1060   }
1061   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1062   *nproc    = nt;
1063   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
1064   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
1065   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
1066   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1067   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1068   cnt       = 0;
1069   for (i=0; i<size; i++) {
1070     if (nprocs[i] > 0) {
1071       bprocs[i]        = cnt;
1072       (*procs)[cnt]    = i;
1073       (*numprocs)[cnt] = nprocs[i];
1074       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1075       cnt++;
1076     }
1077   }
1078 
1079   /* make the list of subdomains for each nontrivial local node */
1080   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1081   cnt  = 0;
1082   for (i=0; i<nrecvs2; i++) {
1083     nt = recvs2[cnt++];
1084     for (j=0; j<nt; j++) {
1085       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1086       cnt += 2 + recvs2[cnt+1];
1087     }
1088   }
1089   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1090   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1091 
1092   /* sort the node indexing by their global numbers */
1093   nt = *nproc;
1094   for (i=0; i<nt; i++) {
1095     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
1096     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1097     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1098     ierr = PetscFree(tmp);CHKERRQ(ierr);
1099   }
1100 
1101   if (debug) { /* -----------------------------------  */
1102     nt = *nproc;
1103     for (i=0; i<nt; i++) {
1104       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1105       for (j=0; j<(*numprocs)[i]; j++) {
1106         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1107       }
1108       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1109     }
1110     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1111   } /* -----------------------------------  */
1112 
1113   /* wait on sends */
1114   if (nsends2) {
1115     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1116     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1117     ierr = PetscFree(send_status);CHKERRQ(ierr);
1118   }
1119 
1120   ierr = PetscFree(starts3);CHKERRQ(ierr);
1121   ierr = PetscFree(dest);CHKERRQ(ierr);
1122   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1123 
1124   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1125   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1126   ierr = PetscFree(starts);CHKERRQ(ierr);
1127   ierr = PetscFree(starts2);CHKERRQ(ierr);
1128   ierr = PetscFree(lens2);CHKERRQ(ierr);
1129 
1130   ierr = PetscFree(source);CHKERRQ(ierr);
1131   ierr = PetscFree(len);CHKERRQ(ierr);
1132   ierr = PetscFree(recvs);CHKERRQ(ierr);
1133   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1134   ierr = PetscFree(sends2);CHKERRQ(ierr);
1135 
1136   /* put the information about myself as the first entry in the list */
1137   first_procs    = (*procs)[0];
1138   first_numprocs = (*numprocs)[0];
1139   first_indices  = (*indices)[0];
1140   for (i=0; i<*nproc; i++) {
1141     if ((*procs)[i] == rank) {
1142       (*procs)[0]    = (*procs)[i];
1143       (*numprocs)[0] = (*numprocs)[i];
1144       (*indices)[0]  = (*indices)[i];
1145       (*procs)[i]    = first_procs;
1146       (*numprocs)[i] = first_numprocs;
1147       (*indices)[i]  = first_indices;
1148       break;
1149     }
1150   }
1151   PetscFunctionReturn(0);
1152 }
1153 
1154 #undef __FUNCT__
1155 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1156 /*@C
1157     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1158 
1159     Collective on ISLocalToGlobalMapping
1160 
1161     Input Parameters:
1162 .   mapping - the mapping from local to global indexing
1163 
1164     Output Parameter:
1165 +   nproc - number of processors that are connected to this one
1166 .   proc - neighboring processors
1167 .   numproc - number of indices for each processor
1168 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1169 
1170     Level: advanced
1171 
1172 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1173           ISLocalToGlobalMappingGetInfo()
1174 @*/
1175 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1176 {
1177   PetscErrorCode ierr;
1178   PetscInt       i;
1179 
1180   PetscFunctionBegin;
1181   ierr = PetscFree(*procs);CHKERRQ(ierr);
1182   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1183   if (*indices) {
1184     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1185     for (i=1; i<*nproc; i++) {
1186       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1187     }
1188     ierr = PetscFree(*indices);CHKERRQ(ierr);
1189   }
1190   PetscFunctionReturn(0);
1191 }
1192 
1193 #undef __FUNCT__
1194 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1195 /*@C
1196     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1197      each index shared by more than one processor
1198 
1199     Collective on ISLocalToGlobalMapping
1200 
1201     Input Parameters:
1202 .   mapping - the mapping from local to global indexing
1203 
1204     Output Parameter:
1205 +   nproc - number of processors that are connected to this one
1206 .   proc - neighboring processors
1207 .   numproc - number of indices for each subdomain (processor)
1208 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1209 
1210     Level: advanced
1211 
1212     Concepts: mapping^local to global
1213 
1214     Fortran Usage:
1215 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1216 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1217           PetscInt indices[nproc][numprocmax],ierr)
1218         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1219         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1220 
1221 
1222 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1223           ISLocalToGlobalMappingRestoreInfo()
1224 @*/
1225 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1226 {
1227   PetscErrorCode ierr;
1228   PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;
1229 
1230   PetscFunctionBegin;
1231   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1232   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
1233   ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1234   for (i=0; i<*nproc; i++) {
1235     ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
1236     for (j=0; j<(*numprocs)[i]; j++) {
1237       for (k=0; k<bs; k++) {
1238         (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1239       }
1240     }
1241     (*numprocs)[i] *= bs;
1242   }
1243   if (bindices) {
1244     ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
1245     for (i=1; i<*nproc; i++) {
1246       ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
1247     }
1248     ierr = PetscFree(bindices);CHKERRQ(ierr);
1249   }
1250   PetscFunctionReturn(0);
1251 }
1252 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the arrays produced by ISLocalToGlobalMappingGetInfo() have the same ownership
     layout as the block info (individually allocated per-subdomain lists), so the
     block restore routine frees them correctly */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1282 
1283 #undef __FUNCT__
1284 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1285 /*@C
1286    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1287 
1288    Not Collective
1289 
1290    Input Arguments:
1291 . ltog - local to global mapping
1292 
1293    Output Arguments:
1294 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1295 
1296    Level: advanced
1297 
1298    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1299 
1300 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1301 @*/
1302 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1303 {
1304   PetscFunctionBegin;
1305   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1306   PetscValidPointer(array,2);
1307   if (ltog->bs == 1) {
1308     *array = ltog->indices;
1309   } else {
1310     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1311     const PetscInt *ii;
1312     PetscErrorCode ierr;
1313 
1314     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1315     *array = jj;
1316     k    = 0;
1317     ii   = ltog->indices;
1318     for (i=0; i<n; i++)
1319       for (j=0; j<bs; j++)
1320         jj[k++] = bs*ii[i] + j;
1321   }
1322   PetscFunctionReturn(0);
1323 }
1324 
1325 #undef __FUNCT__
1326 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1327 /*@C
1328    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1329 
1330    Not Collective
1331 
1332    Input Arguments:
1333 + ltog - local to global mapping
1334 - array - array of indices
1335 
1336    Level: advanced
1337 
1338 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1339 @*/
1340 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1341 {
1342   PetscFunctionBegin;
1343   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1344   PetscValidPointer(array,2);
1345   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1346 
1347   if (ltog->bs > 1) {
1348     PetscErrorCode ierr;
1349     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1350   }
1351   PetscFunctionReturn(0);
1352 }
1353 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* borrowed pointer to the internal block-index array; no allocation is made,
     and ISLocalToGlobalMappingRestoreBlockIndices() checks this same pointer is returned */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1379 
1380 #undef __FUNCT__
1381 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1382 /*@C
1383    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1384 
1385    Not Collective
1386 
1387    Input Arguments:
1388 + ltog - local to global mapping
1389 - array - array of indices
1390 
1391    Level: advanced
1392 
1393 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1394 @*/
1395 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1396 {
1397   PetscFunctionBegin;
1398   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1399   PetscValidPointer(array,2);
1400   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1401   *array = NULL;
1402   PetscFunctionReturn(0);
1403 }
1404 
1405 #undef __FUNCT__
1406 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1407 /*@C
1408    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1409 
1410    Not Collective
1411 
1412    Input Arguments:
1413 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1414 . n - number of mappings to concatenate
1415 - ltogs - local to global mappings
1416 
1417    Output Arguments:
1418 . ltogcat - new mapping
1419 
1420    Note: this currently always returns a mapping with block size of 1
1421 
1422    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1423 
1424    Level: advanced
1425 
1426 .seealso: ISLocalToGlobalMappingCreate()
1427 @*/
1428 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1429 {
1430   PetscInt       i,cnt,m,*idx;
1431   PetscErrorCode ierr;
1432 
1433   PetscFunctionBegin;
1434   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1435   if (n > 0) PetscValidPointer(ltogs,3);
1436   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1437   PetscValidPointer(ltogcat,4);
1438   for (cnt=0,i=0; i<n; i++) {
1439     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1440     cnt += m;
1441   }
1442   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1443   for (cnt=0,i=0; i<n; i++) {
1444     const PetscInt *subidx;
1445     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1446     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1447     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1448     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1449     cnt += m;
1450   }
1451   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1452   PetscFunctionReturn(0);
1453 }
1454 
1455 
1456