xref: /petsc/src/vec/is/utils/isltog.c (revision f5476a2b443e0eedb22d31cb1b2d85b9b918f73f)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->bs*mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   if (!isblock) {
142     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
143     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
144     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
145   } else {
146     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150   }
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
227   *bs = mapping->bs;
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
233 /*@
234     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
235     ordering and a global parallel ordering.
236 
237     Not Collective, but communicator may have more than one process
238 
239     Input Parameters:
240 +   comm - MPI communicator
241 .   bs - the block size
242 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
243 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
244 -   mode - see PetscCopyMode
245 
246     Output Parameter:
247 .   mapping - new mapping data structure
248 
249     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
250     Level: advanced
251 
252     Concepts: mapping^local to global
253 
254 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
255 @*/
256 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
257 {
258   PetscErrorCode ierr;
259   PetscInt       *in;
260 
261   PetscFunctionBegin;
262   if (n) PetscValidIntPointer(indices,3);
263   PetscValidPointer(mapping,4);
264 
265   *mapping = NULL;
266   ierr = ISInitializePackage();CHKERRQ(ierr);
267 
268   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
269                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
270   (*mapping)->n  = n;
271   (*mapping)->bs = bs;
272   /*
273     Do not create the global to local mapping. This is only created if
274     ISGlobalToLocalMapping() is called
275   */
276   (*mapping)->globals = 0;
277   if (mode == PETSC_COPY_VALUES) {
278     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
279     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
280     (*mapping)->indices = in;
281     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
282   } else if (mode == PETSC_OWN_POINTER) {
283     (*mapping)->indices = (PetscInt*)indices;
284     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
285   }
286   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
287   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
288   PetscFunctionReturn(0);
289 }
290 
291 #undef __FUNCT__
292 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
293 /*@
294    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
295    ordering and a global parallel ordering.
296 
297    Note Collective
298 
299    Input Parameters:
300 .  mapping - mapping data structure
301 
302    Level: advanced
303 
304 .seealso: ISLocalToGlobalMappingCreate()
305 @*/
306 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
307 {
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*mapping) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
313   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
314   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
315   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
316   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
317   *mapping = 0;
318   PetscFunctionReturn(0);
319 }
320 
321 #undef __FUNCT__
322 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
323 /*@
324     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
325     a new index set using the global numbering defined in an ISLocalToGlobalMapping
326     context.
327 
328     Not collective
329 
330     Input Parameters:
331 +   mapping - mapping between local and global numbering
332 -   is - index set in local numbering
333 
334     Output Parameters:
335 .   newis - index set in global numbering
336 
337     Level: advanced
338 
339     Concepts: mapping^local to global
340 
341 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
342           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
343 @*/
344 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
345 {
346   PetscErrorCode ierr;
347   PetscInt       n,*idxout;
348   const PetscInt *idxin;
349 
350   PetscFunctionBegin;
351   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
352   PetscValidHeaderSpecific(is,IS_CLASSID,2);
353   PetscValidPointer(newis,3);
354 
355   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
356   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
357   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
358   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
359   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
360   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
361   PetscFunctionReturn(0);
362 }
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "ISLocalToGlobalMappingApply"
366 /*@
367    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
368    and converts them to the global numbering.
369 
370    Not collective
371 
372    Input Parameters:
373 +  mapping - the local to global mapping context
374 .  N - number of integers
375 -  in - input indices in local numbering
376 
377    Output Parameter:
378 .  out - indices in global numbering
379 
380    Notes:
381    The in and out array parameters may be identical.
382 
383    Level: advanced
384 
385 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
386           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
387           AOPetscToApplication(), ISGlobalToLocalMappingApply()
388 
389     Concepts: mapping^local to global
390 @*/
391 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
392 {
393   PetscInt i,bs,Nmax;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
397   bs   = mapping->bs;
398   Nmax = bs*mapping->n;
399   if (bs == 1) {
400     const PetscInt *idx = mapping->indices;
401     for (i=0; i<N; i++) {
402       if (in[i] < 0) {
403         out[i] = in[i];
404         continue;
405       }
406       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
407       out[i] = idx[in[i]];
408     }
409   } else {
410     const PetscInt *idx = mapping->indices;
411     for (i=0; i<N; i++) {
412       if (in[i] < 0) {
413         out[i] = in[i];
414         continue;
415       }
416       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
417       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
418     }
419   }
420   PetscFunctionReturn(0);
421 }
422 
423 #undef __FUNCT__
424 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
425 /*@
426    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
427 
428    Not collective
429 
430    Input Parameters:
431 +  mapping - the local to global mapping context
432 .  N - number of integers
433 -  in - input indices in local block numbering
434 
435    Output Parameter:
436 .  out - indices in global block numbering
437 
438    Notes:
439    The in and out array parameters may be identical.
440 
441    Example:
442      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
443      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
444 
445    Level: advanced
446 
447 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
448           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
449           AOPetscToApplication(), ISGlobalToLocalMappingApply()
450 
451     Concepts: mapping^local to global
452 @*/
453 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
454 {
455 
456   PetscFunctionBegin;
457   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
458   {
459     PetscInt i,Nmax = mapping->n;
460     const PetscInt *idx = mapping->indices;
461 
462     for (i=0; i<N; i++) {
463       if (in[i] < 0) {
464         out[i] = in[i];
465         continue;
466       }
467       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
468       out[i] = idx[in[i]];
469     }
470   }
471   PetscFunctionReturn(0);
472 }
473 
474 /* -----------------------------------------------------------------------------------------*/
475 
476 #undef __FUNCT__
477 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
478 /*
479     Creates the global fields in the ISLocalToGlobalMapping structure
480 */
481 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
482 {
483   PetscErrorCode ierr;
484   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
485 
486   PetscFunctionBegin;
487   end   = 0;
488   start = PETSC_MAX_INT;
489 
490   for (i=0; i<n; i++) {
491     if (idx[i] < 0) continue;
492     if (idx[i] < start) start = idx[i];
493     if (idx[i] > end)   end   = idx[i];
494   }
495   if (start > end) {start = 0; end = -1;}
496   mapping->globalstart = start;
497   mapping->globalend   = end;
498 
499   ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
500   mapping->globals = globals;
501   for (i=0; i<end-start+1; i++) globals[i] = -1;
502   for (i=0; i<n; i++) {
503     if (idx[i] < 0) continue;
504     globals[idx[i] - start] = i;
505   }
506 
507   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "ISGlobalToLocalMappingApply"
513 /*@
514     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
515     specified with a global numbering.
516 
517     Not collective
518 
519     Input Parameters:
520 +   mapping - mapping between local and global numbering
521 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
522            IS_GTOLM_DROP - drops the indices with no local value from the output list
523 .   n - number of global indices to map
524 -   idx - global indices to map
525 
526     Output Parameters:
527 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
528 -   idxout - local index of each global index, one must pass in an array long enough
529              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
530              idxout == NULL to determine the required length (returned in nout)
531              and then allocate the required space and call ISGlobalToLocalMappingApply()
532              a second time to set the values.
533 
534     Notes:
535     Either nout or idxout may be NULL. idx and idxout may be identical.
536 
537     This is not scalable in memory usage. Each processor requires O(Nglobal) size
538     array to compute these.
539 
540     Level: advanced
541 
542     Developer Note: The manual page states that idx and idxout may be identical but the calling
543        sequence declares idx as const so it cannot be the same as idxout.
544 
545     Concepts: mapping^global to local
546 
547 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
548           ISLocalToGlobalMappingDestroy()
549 @*/
550 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
551                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
552 {
553   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
558   if (!mapping->globals) {
559     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
560   }
561   globals = mapping->globals;
562   start   = mapping->globalstart;
563   end     = mapping->globalend;
564   bs      = mapping->bs;
565 
566   if (type == IS_GTOLM_MASK) {
567     if (idxout) {
568       for (i=0; i<n; i++) {
569         if (idx[i] < 0) idxout[i] = idx[i];
570         else if (idx[i] < bs*start) idxout[i] = -1;
571         else if (idx[i] > bs*end)   idxout[i] = -1;
572         else                        idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
573       }
574     }
575     if (nout) *nout = n;
576   } else {
577     if (idxout) {
578       for (i=0; i<n; i++) {
579         if (idx[i] < 0) continue;
580         if (idx[i] < bs*start) continue;
581         if (idx[i] > bs*end) continue;
582         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
583         if (tmp < 0) continue;
584         idxout[nf++] = tmp;
585       }
586     } else {
587       for (i=0; i<n; i++) {
588         if (idx[i] < 0) continue;
589         if (idx[i] < bs*start) continue;
590         if (idx[i] > bs*end) continue;
591         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
592         if (tmp < 0) continue;
593         nf++;
594       }
595     }
596     if (nout) *nout = nf;
597   }
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
603 /*@
604     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
605     specified with a block global numbering.
606 
607     Not collective
608 
609     Input Parameters:
610 +   mapping - mapping between local and global numbering
611 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
612            IS_GTOLM_DROP - drops the indices with no local value from the output list
613 .   n - number of global indices to map
614 -   idx - global indices to map
615 
616     Output Parameters:
617 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
618 -   idxout - local index of each global index, one must pass in an array long enough
619              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
620              idxout == NULL to determine the required length (returned in nout)
621              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
622              a second time to set the values.
623 
624     Notes:
625     Either nout or idxout may be NULL. idx and idxout may be identical.
626 
627     This is not scalable in memory usage. Each processor requires O(Nglobal) size
628     array to compute these.
629 
630     Level: advanced
631 
632     Developer Note: The manual page states that idx and idxout may be identical but the calling
633        sequence declares idx as const so it cannot be the same as idxout.
634 
635     Concepts: mapping^global to local
636 
637 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
638           ISLocalToGlobalMappingDestroy()
639 @*/
640 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
641                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
642 {
643   PetscInt       i,*globals,nf = 0,tmp,start,end;
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
648   if (!mapping->globals) {
649     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
650   }
651   globals = mapping->globals;
652   start   = mapping->globalstart;
653   end     = mapping->globalend;
654 
655   if (type == IS_GTOLM_MASK) {
656     if (idxout) {
657       for (i=0; i<n; i++) {
658         if (idx[i] < 0) idxout[i] = idx[i];
659         else if (idx[i] < start) idxout[i] = -1;
660         else if (idx[i] > end)   idxout[i] = -1;
661         else                     idxout[i] = globals[idx[i] - start];
662       }
663     }
664     if (nout) *nout = n;
665   } else {
666     if (idxout) {
667       for (i=0; i<n; i++) {
668         if (idx[i] < 0) continue;
669         if (idx[i] < start) continue;
670         if (idx[i] > end) continue;
671         tmp = globals[idx[i] - start];
672         if (tmp < 0) continue;
673         idxout[nf++] = tmp;
674       }
675     } else {
676       for (i=0; i<n; i++) {
677         if (idx[i] < 0) continue;
678         if (idx[i] < start) continue;
679         if (idx[i] > end) continue;
680         tmp = globals[idx[i] - start];
681         if (tmp < 0) continue;
682         nf++;
683       }
684     }
685     if (nout) *nout = nf;
686   }
687   PetscFunctionReturn(0);
688 }
689 
690 #undef __FUNCT__
691 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
692 /*@C
693     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
694      each index shared by more than one processor
695 
696     Collective on ISLocalToGlobalMapping
697 
698     Input Parameters:
699 .   mapping - the mapping from local to global indexing
700 
701     Output Parameter:
702 +   nproc - number of processors that are connected to this one
703 .   proc - neighboring processors
704 .   numproc - number of indices for each subdomain (processor)
705 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
706 
707     Level: advanced
708 
709     Concepts: mapping^local to global
710 
711     Fortran Usage:
712 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
713 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
714           PetscInt indices[nproc][numprocmax],ierr)
715         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
716         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
717 
718 
719 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
720           ISLocalToGlobalMappingRestoreInfo()
721 @*/
722 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
723 {
724   PetscErrorCode ierr;
725   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
726   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
727   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
728   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
729   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
730   PetscInt       first_procs,first_numprocs,*first_indices;
731   MPI_Request    *recv_waits,*send_waits;
732   MPI_Status     recv_status,*send_status,*recv_statuses;
733   MPI_Comm       comm;
734   PetscBool      debug = PETSC_FALSE;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
738   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
739   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
740   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
741   if (size == 1) {
742     *nproc         = 0;
743     *procs         = NULL;
744     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
745     (*numprocs)[0] = 0;
746     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
747     (*indices)[0]  = NULL;
748     PetscFunctionReturn(0);
749   }
750 
751   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
752 
753   /*
754     Notes on ISLocalToGlobalMappingGetBlockInfo
755 
756     globally owned node - the nodes that have been assigned to this processor in global
757            numbering, just for this routine.
758 
759     nontrivial globally owned node - node assigned to this processor that is on a subdomain
760            boundary (i.e. is has more than one local owner)
761 
762     locally owned node - node that exists on this processors subdomain
763 
764     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
765            local subdomain
766   */
767   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
768   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
769   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
770 
771   for (i=0; i<n; i++) {
772     if (lindices[i] > max) max = lindices[i];
773   }
774   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
775   Ng++;
776   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
777   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
778   scale  = Ng/size + 1;
779   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
780   rstart = scale*rank;
781 
782   /* determine ownership ranges of global indices */
783   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
784   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
785 
786   /* determine owners of each local node  */
787   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
788   for (i=0; i<n; i++) {
789     proc             = lindices[i]/scale; /* processor that globally owns this index */
790     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
791     owner[i]         = proc;
792     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
793   }
794   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
795   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
796 
797   /* inform other processors of number of messages and max length*/
798   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
799   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
800 
801   /* post receives for owned rows */
802   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
803   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
804   for (i=0; i<nrecvs; i++) {
805     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
806   }
807 
808   /* pack messages containing lists of local nodes to owners */
809   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
810   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
811   starts[0] = 0;
812   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
813   for (i=0; i<n; i++) {
814     sends[starts[owner[i]]++] = lindices[i];
815     sends[starts[owner[i]]++] = i;
816   }
817   ierr = PetscFree(owner);CHKERRQ(ierr);
818   starts[0] = 0;
819   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
820 
821   /* send the messages */
822   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
823   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
824   cnt = 0;
825   for (i=0; i<size; i++) {
826     if (nprocs[2*i]) {
827       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
828       dest[cnt] = i;
829       cnt++;
830     }
831   }
832   ierr = PetscFree(starts);CHKERRQ(ierr);
833 
834   /* wait on receives */
835   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
836   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
837   cnt  = nrecvs;
838   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
839   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
840   while (cnt) {
841     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
842     /* unpack receives into our local space */
843     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
844     source[imdex] = recv_status.MPI_SOURCE;
845     len[imdex]    = len[imdex]/2;
846     /* count how many local owners for each of my global owned indices */
847     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
848     cnt--;
849   }
850   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
851 
852   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
853   nowned  = 0;
854   nownedm = 0;
855   for (i=0; i<ng; i++) {
856     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
857   }
858 
859   /* create single array to contain rank of all local owners of each globally owned index */
860   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
861   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
862   starts[0] = 0;
863   for (i=1; i<ng; i++) {
864     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
865     else starts[i] = starts[i-1];
866   }
867 
868   /* for each nontrival globally owned node list all arriving processors */
869   for (i=0; i<nrecvs; i++) {
870     for (j=0; j<len[i]; j++) {
871       node = recvs[2*i*nmax+2*j]-rstart;
872       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
873     }
874   }
875 
876   if (debug) { /* -----------------------------------  */
877     starts[0] = 0;
878     for (i=1; i<ng; i++) {
879       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
880       else starts[i] = starts[i-1];
881     }
882     for (i=0; i<ng; i++) {
883       if (nownedsenders[i] > 1) {
884         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
885         for (j=0; j<nownedsenders[i]; j++) {
886           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
887         }
888         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
889       }
890     }
891     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
892   } /* -----------------------------------  */
893 
894   /* wait on original sends */
895   if (nsends) {
896     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
897     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
898     ierr = PetscFree(send_status);CHKERRQ(ierr);
899   }
900   ierr = PetscFree(send_waits);CHKERRQ(ierr);
901   ierr = PetscFree(sends);CHKERRQ(ierr);
902   ierr = PetscFree(nprocs);CHKERRQ(ierr);
903 
904   /* pack messages to send back to local owners */
905   starts[0] = 0;
906   for (i=1; i<ng; i++) {
907     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
908     else starts[i] = starts[i-1];
909   }
910   nsends2 = nrecvs;
911   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
912   for (i=0; i<nrecvs; i++) {
913     nprocs[i] = 1;
914     for (j=0; j<len[i]; j++) {
915       node = recvs[2*i*nmax+2*j]-rstart;
916       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
917     }
918   }
919   nt = 0;
920   for (i=0; i<nsends2; i++) nt += nprocs[i];
921 
922   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
923   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
924 
925   starts2[0] = 0;
926   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
927   /*
928      Each message is 1 + nprocs[i] long, and consists of
929        (0) the number of nodes being sent back
930        (1) the local node number,
931        (2) the number of processors sharing it,
932        (3) the processors sharing it
933   */
934   for (i=0; i<nsends2; i++) {
935     cnt = 1;
936     sends2[starts2[i]] = 0;
937     for (j=0; j<len[i]; j++) {
938       node = recvs[2*i*nmax+2*j]-rstart;
939       if (nownedsenders[node] > 1) {
940         sends2[starts2[i]]++;
941         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
942         sends2[starts2[i]+cnt++] = nownedsenders[node];
943         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
944         cnt += nownedsenders[node];
945       }
946     }
947   }
948 
949   /* receive the message lengths */
950   nrecvs2 = nsends;
951   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
952   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
953   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
954   for (i=0; i<nrecvs2; i++) {
955     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
956   }
957 
958   /* send the message lengths */
959   for (i=0; i<nsends2; i++) {
960     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
961   }
962 
963   /* wait on receives of lens */
964   if (nrecvs2) {
965     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
966     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
967     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
968   }
969   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
970 
971   starts3[0] = 0;
972   nt         = 0;
973   for (i=0; i<nrecvs2-1; i++) {
974     starts3[i+1] = starts3[i] + lens2[i];
975     nt          += lens2[i];
976   }
977   if (nrecvs2) nt += lens2[nrecvs2-1];
978 
979   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
980   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
981   for (i=0; i<nrecvs2; i++) {
982     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
983   }
984 
985   /* send the messages */
986   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
987   for (i=0; i<nsends2; i++) {
988     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
989   }
990 
991   /* wait on receives */
992   if (nrecvs2) {
993     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
994     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
995     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
996   }
997   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
998   ierr = PetscFree(nprocs);CHKERRQ(ierr);
999 
1000   if (debug) { /* -----------------------------------  */
1001     cnt = 0;
1002     for (i=0; i<nrecvs2; i++) {
1003       nt = recvs2[cnt++];
1004       for (j=0; j<nt; j++) {
1005         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
1006         for (k=0; k<recvs2[cnt+1]; k++) {
1007           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
1008         }
1009         cnt += 2 + recvs2[cnt+1];
1010         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1011       }
1012     }
1013     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1014   } /* -----------------------------------  */
1015 
1016   /* count number subdomains for each local node */
1017   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1018   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1019   cnt  = 0;
1020   for (i=0; i<nrecvs2; i++) {
1021     nt = recvs2[cnt++];
1022     for (j=0; j<nt; j++) {
1023       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1024       cnt += 2 + recvs2[cnt+1];
1025     }
1026   }
1027   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1028   *nproc    = nt;
1029   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
1030   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
1031   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
1032   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1033   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1034   cnt       = 0;
1035   for (i=0; i<size; i++) {
1036     if (nprocs[i] > 0) {
1037       bprocs[i]        = cnt;
1038       (*procs)[cnt]    = i;
1039       (*numprocs)[cnt] = nprocs[i];
1040       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1041       cnt++;
1042     }
1043   }
1044 
1045   /* make the list of subdomains for each nontrivial local node */
1046   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1047   cnt  = 0;
1048   for (i=0; i<nrecvs2; i++) {
1049     nt = recvs2[cnt++];
1050     for (j=0; j<nt; j++) {
1051       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1052       cnt += 2 + recvs2[cnt+1];
1053     }
1054   }
1055   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1056   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1057 
1058   /* sort the node indexing by their global numbers */
1059   nt = *nproc;
1060   for (i=0; i<nt; i++) {
1061     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
1062     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1063     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1064     ierr = PetscFree(tmp);CHKERRQ(ierr);
1065   }
1066 
1067   if (debug) { /* -----------------------------------  */
1068     nt = *nproc;
1069     for (i=0; i<nt; i++) {
1070       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1071       for (j=0; j<(*numprocs)[i]; j++) {
1072         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1073       }
1074       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1075     }
1076     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1077   } /* -----------------------------------  */
1078 
1079   /* wait on sends */
1080   if (nsends2) {
1081     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1082     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1083     ierr = PetscFree(send_status);CHKERRQ(ierr);
1084   }
1085 
1086   ierr = PetscFree(starts3);CHKERRQ(ierr);
1087   ierr = PetscFree(dest);CHKERRQ(ierr);
1088   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1089 
1090   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1091   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1092   ierr = PetscFree(starts);CHKERRQ(ierr);
1093   ierr = PetscFree(starts2);CHKERRQ(ierr);
1094   ierr = PetscFree(lens2);CHKERRQ(ierr);
1095 
1096   ierr = PetscFree(source);CHKERRQ(ierr);
1097   ierr = PetscFree(len);CHKERRQ(ierr);
1098   ierr = PetscFree(recvs);CHKERRQ(ierr);
1099   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1100   ierr = PetscFree(sends2);CHKERRQ(ierr);
1101 
1102   /* put the information about myself as the first entry in the list */
1103   first_procs    = (*procs)[0];
1104   first_numprocs = (*numprocs)[0];
1105   first_indices  = (*indices)[0];
1106   for (i=0; i<*nproc; i++) {
1107     if ((*procs)[i] == rank) {
1108       (*procs)[0]    = (*procs)[i];
1109       (*numprocs)[0] = (*numprocs)[i];
1110       (*indices)[0]  = (*indices)[i];
1111       (*procs)[i]    = first_procs;
1112       (*numprocs)[i] = first_numprocs;
1113       (*indices)[i]  = first_indices;
1114       break;
1115     }
1116   }
1117   PetscFunctionReturn(0);
1118 }
1119 
1120 #undef __FUNCT__
1121 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1122 /*@C
1123     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1124 
1125     Collective on ISLocalToGlobalMapping
1126 
1127     Input Parameters:
1128 .   mapping - the mapping from local to global indexing
1129 
1130     Output Parameter:
1131 +   nproc - number of processors that are connected to this one
1132 .   proc - neighboring processors
1133 .   numproc - number of indices for each processor
1134 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1135 
1136     Level: advanced
1137 
1138 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1139           ISLocalToGlobalMappingGetInfo()
1140 @*/
1141 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1142 {
1143   PetscErrorCode ierr;
1144   PetscInt       i;
1145 
1146   PetscFunctionBegin;
1147   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1148   ierr = PetscFree(*procs);CHKERRQ(ierr);
1149   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1150   if (*indices) {
1151     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1152     for (i=1; i<*nproc; i++) {
1153       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1154     }
1155     ierr = PetscFree(*indices);CHKERRQ(ierr);
1156   }
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 #undef __FUNCT__
1161 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1162 /*@C
1163     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1164      each index shared by more than one processor
1165 
1166     Collective on ISLocalToGlobalMapping
1167 
1168     Input Parameters:
1169 .   mapping - the mapping from local to global indexing
1170 
1171     Output Parameter:
1172 +   nproc - number of processors that are connected to this one
1173 .   proc - neighboring processors
1174 .   numproc - number of indices for each subdomain (processor)
1175 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1176 
1177     Level: advanced
1178 
1179     Concepts: mapping^local to global
1180 
1181     Fortran Usage:
1182 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1183 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1184           PetscInt indices[nproc][numprocmax],ierr)
1185         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1186         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1187 
1188 
1189 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1190           ISLocalToGlobalMappingRestoreInfo()
1191 @*/
1192 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1193 {
1194   PetscErrorCode ierr;
1195   PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;
1196 
1197   PetscFunctionBegin;
1198   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1199   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
1200   ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1201   for (i=0; i<*nproc; i++) {
1202     ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
1203     for (j=0; j<(*numprocs)[i]; j++) {
1204       for (k=0; k<bs; k++) {
1205         (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1206       }
1207     }
1208     (*numprocs)[i] *= bs;
1209   }
1210   if (bindices) {
1211     ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
1212     for (i=1; i<*nproc; i++) {
1213       ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
1214     }
1215     ierr = PetscFree(bindices);CHKERRQ(ierr);
1216   }
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 #undef __FUNCT__
1221 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1222 /*@C
1223     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1224 
1225     Collective on ISLocalToGlobalMapping
1226 
1227     Input Parameters:
1228 .   mapping - the mapping from local to global indexing
1229 
1230     Output Parameter:
1231 +   nproc - number of processors that are connected to this one
1232 .   proc - neighboring processors
1233 .   numproc - number of indices for each processor
1234 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1235 
1236     Level: advanced
1237 
1238 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1239           ISLocalToGlobalMappingGetInfo()
1240 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* The arrays produced by ISLocalToGlobalMappingGetInfo() have the same
     layout as those from the block variant (one malloc'd index array per
     neighbor plus the three flat arrays), so freeing is delegated to
     ISLocalToGlobalMappingRestoreBlockInfo(). */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1249 
1250 #undef __FUNCT__
1251 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1252 /*@C
1253    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1254 
1255    Not Collective
1256 
1257    Input Arguments:
1258 . ltog - local to global mapping
1259 
1260    Output Arguments:
1261 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1262 
1263    Level: advanced
1264 
1265    Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1266 
1267 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1268 @*/
1269 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1270 {
1271   PetscFunctionBegin;
1272   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1273   PetscValidPointer(array,2);
1274   if (ltog->bs == 1) {
1275     *array = ltog->indices;
1276   } else {
1277     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1278     const PetscInt *ii;
1279     PetscErrorCode ierr;
1280 
1281     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1282     *array = jj;
1283     k    = 0;
1284     ii   = ltog->indices;
1285     for (i=0; i<n; i++)
1286       for (j=0; j<bs; j++)
1287         jj[k++] = bs*ii[i] + j;
1288   }
1289   PetscFunctionReturn(0);
1290 }
1291 
1292 #undef __FUNCT__
1293 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1294 /*@C
1295    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1296 
1297    Not Collective
1298 
1299    Input Arguments:
1300 + ltog - local to global mapping
1301 - array - array of indices
1302 
1303    Level: advanced
1304 
1305 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1306 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* With block size 1, ISLocalToGlobalMappingGetIndices() handed out the
     internal array directly, so the caller must return exactly that pointer. */
  if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

  if (ltog->bs > 1) {
    /* With block size > 1 the Get call allocated a temporary expanded copy;
       free it here. The cast strips const because PetscFree writes through
       its argument — presumably it also zeroes *array; confirm against the
       PetscFree macro definition. */
    PetscErrorCode ierr;
    ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1320 
1321 #undef __FUNCT__
1322 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1323 /*@C
1324    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1325 
1326    Not Collective
1327 
1328    Input Arguments:
1329 . ltog - local to global mapping
1330 
1331    Output Arguments:
1332 . array - array of indices
1333 
1334    Level: advanced
1335 
1336 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1337 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Read-only access to the internal block-index array; no copy is made, so
     the pointer must be given back via ISLocalToGlobalMappingRestoreBlockIndices(). */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1346 
1347 #undef __FUNCT__
1348 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1349 /*@C
1350    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1351 
1352    Not Collective
1353 
1354    Input Arguments:
1355 + ltog - local to global mapping
1356 - array - array of indices
1357 
1358    Level: advanced
1359 
1360 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1361 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* ISLocalToGlobalMappingGetBlockIndices() hands out the internal array
     itself, so anything other than that exact pointer is a usage error. */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* Null the caller's pointer: the mapping retains ownership of the array. */
  *array = NULL;
  PetscFunctionReturn(0);
}
1371 
1372 #undef __FUNCT__
1373 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1374 /*@C
1375    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1376 
1377    Not Collective
1378 
1379    Input Arguments:
1380 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1381 . n - number of mappings to concatenate
1382 - ltogs - local to global mappings
1383 
1384    Output Arguments:
1385 . ltogcat - new mapping
1386 
1387    Note: this currently always returns a mapping with block size of 1
1388 
1389    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1390 
1391    Level: advanced
1392 
1393 .seealso: ISLocalToGlobalMappingCreate()
1394 @*/
1395 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1396 {
1397   PetscInt       i,cnt,m,*idx;
1398   PetscErrorCode ierr;
1399 
1400   PetscFunctionBegin;
1401   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1402   if (n > 0) PetscValidPointer(ltogs,3);
1403   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1404   PetscValidPointer(ltogcat,4);
1405   for (cnt=0,i=0; i<n; i++) {
1406     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1407     cnt += m;
1408   }
1409   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1410   for (cnt=0,i=0; i<n; i++) {
1411     const PetscInt *subidx;
1412     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1413     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1414     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1415     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1416     cnt += m;
1417   }
1418   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1419   PetscFunctionReturn(0);
1420 }
1421 
1422 
1423