xref: /petsc/src/vec/is/utils/isltog.c (revision 7ae38d14048e535a346ba522ffbdb8e04ba6fa16)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISLocalToGlobalMappingDuplicate"
11 /*@
12     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
13 
14     Not Collective
15 
16     Input Parameter:
17 .   ltog - local to global mapping
18 
19     Output Parameter:
20 .   nltog - the duplicated local to global mapping
21 
22     Level: advanced
23 
24     Concepts: mapping^local to global
25 
26 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
27 @*/
28 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
29 {
30   PetscErrorCode ierr;
31 
32   PetscFunctionBegin;
33   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
34   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
35   PetscFunctionReturn(0);
36 }
37 
38 #undef __FUNCT__
39 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
40 /*@
41     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
42 
43     Not Collective
44 
45     Input Parameter:
46 .   ltog - local to global mapping
47 
48     Output Parameter:
49 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
50 
51     Level: advanced
52 
53     Concepts: mapping^local to global
54 
55 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
56 @*/
57 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
58 {
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
61   PetscValidIntPointer(n,2);
62   *n = mapping->bs*mapping->n;
63   PetscFunctionReturn(0);
64 }
65 
66 #undef __FUNCT__
67 #define __FUNCT__ "ISLocalToGlobalMappingView"
68 /*@C
69     ISLocalToGlobalMappingView - View a local to global mapping
70 
71     Not Collective
72 
73     Input Parameters:
74 +   ltog - local to global mapping
75 -   viewer - viewer
76 
77     Level: advanced
78 
79     Concepts: mapping^local to global
80 
81 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
82 @*/
83 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
84 {
85   PetscInt       i;
86   PetscMPIInt    rank;
87   PetscBool      iascii;
88   PetscErrorCode ierr;
89 
90   PetscFunctionBegin;
91   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
92   if (!viewer) {
93     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
94   }
95   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
96 
97   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
98   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
99   if (iascii) {
100     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
101     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
102     for (i=0; i<mapping->n; i++) {
103       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
104     }
105     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
106     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
107   }
108   PetscFunctionReturn(0);
109 }
110 
111 #undef __FUNCT__
112 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
113 /*@
114     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
115     ordering and a global parallel ordering.
116 
117     Not collective
118 
119     Input Parameter:
120 .   is - index set containing the global numbers for each local number
121 
122     Output Parameter:
123 .   mapping - new mapping data structure
124 
125     Notes: the block size of the IS determines the block size of the mapping
126     Level: advanced
127 
128     Concepts: mapping^local to global
129 
130 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
131 @*/
132 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
133 {
134   PetscErrorCode ierr;
135   PetscInt       n,bs;
136   const PetscInt *indices;
137   MPI_Comm       comm;
138   PetscBool      isblock;
139 
140   PetscFunctionBegin;
141   PetscValidHeaderSpecific(is,IS_CLASSID,1);
142   PetscValidPointer(mapping,2);
143 
144   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
145   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
146   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
147   if (!isblock) {
148     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
149     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
150     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
151   } else {
152     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
153     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
154     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
155     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
156   }
157   PetscFunctionReturn(0);
158 }
159 
160 #undef __FUNCT__
161 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
162 /*@C
163     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
164     ordering and a global parallel ordering.
165 
166     Collective
167 
168     Input Parameter:
169 +   sf - star forest mapping contiguous local indices to (rank, offset)
170 -   start - first global index on this process
171 
172     Output Parameter:
173 .   mapping - new mapping data structure
174 
175     Level: advanced
176 
177     Concepts: mapping^local to global
178 
179 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
180 @*/
181 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
182 {
183   PetscErrorCode ierr;
184   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
185   const PetscInt *ilocal;
186   MPI_Comm       comm;
187 
188   PetscFunctionBegin;
189   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
190   PetscValidPointer(mapping,3);
191 
192   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
193   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
194   if (ilocal) {
195     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
196   }
197   else maxlocal = nleaves;
198   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
199   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
200   for (i=0; i<nroots; i++) globals[i] = start + i;
201   for (i=0; i<maxlocal; i++) ltog[i] = -1;
202   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
203   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
204   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
205   ierr = PetscFree(globals);CHKERRQ(ierr);
206   PetscFunctionReturn(0);
207 }
208 
209 #undef __FUNCT__
210 #define __FUNCT__ "ISLocalToGlobalMappingSetBlockSize"
211 /*@
212     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
213 
214     Not collective
215 
216     Input Parameters:
217 .   mapping - mapping data structure
218 .   bs - the blocksize
219 
220     Level: advanced
221 
222     Concepts: mapping^local to global
223 
224 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
225 @*/
226 PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
227 {
228   PetscInt       *nid;
229   const PetscInt *oid;
230   PetscInt       i,cn,on,obs,nn;
231   PetscErrorCode ierr;
232 
233   PetscFunctionBegin;
234   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
235   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
236   if (bs == mapping->bs) PetscFunctionReturn(0);
237   on  = mapping->n;
238   obs = mapping->bs;
239   oid = mapping->indices;
240   nn  = (on*obs)/bs;
241   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
242 
243   ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
244   ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
245   for (i=0;i<nn;i++) {
246     PetscInt j;
247     for (j=0,cn=0;j<bs-1;j++) {
248       if (oid[i*bs+j] < 0) { cn++; continue; }
249       if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
250     }
251     if (oid[i*bs+j] < 0) cn++;
252     if (cn) {
253       if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
254       nid[i] = -1;
255     } else {
256       nid[i] = oid[i*bs]/bs;
257     }
258   }
259   ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);
260 
261   mapping->n           = nn;
262   mapping->bs          = bs;
263   ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
264   mapping->indices     = nid;
265   ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);
266   mapping->globalstart = 0;
267   mapping->globalend   = 0;
268   PetscFunctionReturn(0);
269 }
270 
271 #undef __FUNCT__
272 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
273 /*@
274     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
275     ordering and a global parallel ordering.
276 
277     Not Collective
278 
279     Input Parameters:
280 .   mapping - mapping data structure
281 
282     Output Parameter:
283 .   bs - the blocksize
284 
285     Level: advanced
286 
287     Concepts: mapping^local to global
288 
289 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
290 @*/
291 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
292 {
293   PetscFunctionBegin;
294   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
295   *bs = mapping->bs;
296   PetscFunctionReturn(0);
297 }
298 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may legitimately be NULL when there are no local entries */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;   /* number of block indices */
  (*mapping)->bs            = bs;
  /* neighbor-information cache starts empty; it is filled lazily by ISLocalToGlobalMappingGetBlockInfo() */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's index array */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* adopt the caller's array; it will be freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
363 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object's storage */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* free each cached per-process index array individually, then the array of pointers itself */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
407 /*@
408     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
409     a new index set using the global numbering defined in an ISLocalToGlobalMapping
410     context.
411 
412     Not collective
413 
414     Input Parameters:
415 +   mapping - mapping between local and global numbering
416 -   is - index set in local numbering
417 
418     Output Parameters:
419 .   newis - index set in global numbering
420 
421     Level: advanced
422 
423     Concepts: mapping^local to global
424 
425 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
426           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
427 @*/
428 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
429 {
430   PetscErrorCode ierr;
431   PetscInt       n,*idxout;
432   const PetscInt *idxin;
433 
434   PetscFunctionBegin;
435   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
436   PetscValidHeaderSpecific(is,IS_CLASSID,2);
437   PetscValidPointer(newis,3);
438 
439   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
440   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
441   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
442   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
443   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
444   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
445   PetscFunctionReturn(0);
446 }
447 
448 #undef __FUNCT__
449 #define __FUNCT__ "ISLocalToGlobalMappingApply"
450 /*@
451    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
452    and converts them to the global numbering.
453 
454    Not collective
455 
456    Input Parameters:
457 +  mapping - the local to global mapping context
458 .  N - number of integers
459 -  in - input indices in local numbering
460 
461    Output Parameter:
462 .  out - indices in global numbering
463 
464    Notes:
465    The in and out array parameters may be identical.
466 
467    Level: advanced
468 
469 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
470           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
471           AOPetscToApplication(), ISGlobalToLocalMappingApply()
472 
473     Concepts: mapping^local to global
474 @*/
475 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
476 {
477   PetscInt i,bs,Nmax;
478 
479   PetscFunctionBegin;
480   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
481   bs   = mapping->bs;
482   Nmax = bs*mapping->n;
483   if (bs == 1) {
484     const PetscInt *idx = mapping->indices;
485     for (i=0; i<N; i++) {
486       if (in[i] < 0) {
487         out[i] = in[i];
488         continue;
489       }
490       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
491       out[i] = idx[in[i]];
492     }
493   } else {
494     const PetscInt *idx = mapping->indices;
495     for (i=0; i<N; i++) {
496       if (in[i] < 0) {
497         out[i] = in[i];
498         continue;
499       }
500       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
501       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
502     }
503   }
504   PetscFunctionReturn(0);
505 }
506 
507 #undef __FUNCT__
508 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
509 /*@
510    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
511 
512    Not collective
513 
514    Input Parameters:
515 +  mapping - the local to global mapping context
516 .  N - number of integers
517 -  in - input indices in local block numbering
518 
519    Output Parameter:
520 .  out - indices in global block numbering
521 
522    Notes:
523    The in and out array parameters may be identical.
524 
525    Example:
526      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
527      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
528 
529    Level: advanced
530 
531 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
532           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
533           AOPetscToApplication(), ISGlobalToLocalMappingApply()
534 
535     Concepts: mapping^local to global
536 @*/
537 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
538 {
539 
540   PetscFunctionBegin;
541   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
542   {
543     PetscInt i,Nmax = mapping->n;
544     const PetscInt *idx = mapping->indices;
545 
546     for (i=0; i<N; i++) {
547       if (in[i] < 0) {
548         out[i] = in[i];
549         continue;
550       }
551       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
552       out[i] = idx[in[i]];
553     }
554   }
555   PetscFunctionReturn(0);
556 }
557 
558 /* -----------------------------------------------------------------------------------------*/
559 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a dense lookup table globals[] spanning [globalstart, globalend] that
    maps a global block index g to its local block index, or -1 when g is
    not present locally.  Memory use is proportional to the span of the
    local global indices, not to their count.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the smallest and largest nonnegative global indices; negative entries mark holes and are skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}  /* no valid indices at all: use an empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* +2 (rather than +1) keeps the allocation nonempty even for the empty range where end-start+1 == 0 */
  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  /* initialize the table to "not present locally" ... */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  /* ... then record the local position of every valid global index */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
594 
595 #undef __FUNCT__
596 #define __FUNCT__ "ISGlobalToLocalMappingApply"
597 /*@
598     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
599     specified with a global numbering.
600 
601     Not collective
602 
603     Input Parameters:
604 +   mapping - mapping between local and global numbering
605 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
606            IS_GTOLM_DROP - drops the indices with no local value from the output list
607 .   n - number of global indices to map
608 -   idx - global indices to map
609 
610     Output Parameters:
611 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
612 -   idxout - local index of each global index, one must pass in an array long enough
613              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
614              idxout == NULL to determine the required length (returned in nout)
615              and then allocate the required space and call ISGlobalToLocalMappingApply()
616              a second time to set the values.
617 
618     Notes:
619     Either nout or idxout may be NULL. idx and idxout may be identical.
620 
621     This is not scalable in memory usage. Each processor requires O(Nglobal) size
622     array to compute these.
623 
624     Level: advanced
625 
626     Developer Note: The manual page states that idx and idxout may be identical but the calling
627        sequence declares idx as const so it cannot be the same as idxout.
628 
629     Concepts: mapping^global to local
630 
631 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
632           ISLocalToGlobalMappingDestroy()
633 @*/
634 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
635                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
636 {
637   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
638   PetscErrorCode ierr;
639 
640   PetscFunctionBegin;
641   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
642   if (!mapping->globals) {
643     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
644   }
645   globals = mapping->globals;
646   start   = mapping->globalstart;
647   end     = mapping->globalend;
648   bs      = mapping->bs;
649 
650   if (type == IS_GTOLM_MASK) {
651     if (idxout) {
652       for (i=0; i<n; i++) {
653         if (idx[i] < 0)                   idxout[i] = idx[i];
654         else if (idx[i] < bs*start)       idxout[i] = -1;
655         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
656         else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
657       }
658     }
659     if (nout) *nout = n;
660   } else {
661     if (idxout) {
662       for (i=0; i<n; i++) {
663         if (idx[i] < 0) continue;
664         if (idx[i] < bs*start) continue;
665         if (idx[i] > bs*(end+1)-1) continue;
666         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
667         if (tmp < 0) continue;
668         idxout[nf++] = tmp;
669       }
670     } else {
671       for (i=0; i<n; i++) {
672         if (idx[i] < 0) continue;
673         if (idx[i] < bs*start) continue;
674         if (idx[i] > bs*(end+1)-1) continue;
675         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
676         if (tmp < 0) continue;
677         nf++;
678       }
679     }
680     if (nout) *nout = nf;
681   }
682   PetscFunctionReturn(0);
683 }
684 
685 #undef __FUNCT__
686 #define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
687 /*@
688     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
689     a new index set using the local numbering defined in an ISLocalToGlobalMapping
690     context.
691 
692     Not collective
693 
694     Input Parameters:
695 +   mapping - mapping between local and global numbering
696 -   is - index set in global numbering
697 
698     Output Parameters:
699 .   newis - index set in local numbering
700 
701     Level: advanced
702 
703     Concepts: mapping^local to global
704 
705 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
706           ISLocalToGlobalMappingDestroy()
707 @*/
708 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
709 {
710   PetscErrorCode ierr;
711   PetscInt       n,nout,*idxout;
712   const PetscInt *idxin;
713 
714   PetscFunctionBegin;
715   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
716   PetscValidHeaderSpecific(is,IS_CLASSID,3);
717   PetscValidPointer(newis,4);
718 
719   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
720   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
721   if (type == IS_GTOLM_MASK) {
722     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
723   } else {
724     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
725     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
726   }
727   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
728   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
729   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
730   PetscFunctionReturn(0);
731 }
732 
733 #undef __FUNCT__
734 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
735 /*@
736     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
737     specified with a block global numbering.
738 
739     Not collective
740 
741     Input Parameters:
742 +   mapping - mapping between local and global numbering
743 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
744            IS_GTOLM_DROP - drops the indices with no local value from the output list
745 .   n - number of global indices to map
746 -   idx - global indices to map
747 
748     Output Parameters:
749 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
750 -   idxout - local index of each global index, one must pass in an array long enough
751              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
752              idxout == NULL to determine the required length (returned in nout)
753              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
754              a second time to set the values.
755 
756     Notes:
757     Either nout or idxout may be NULL. idx and idxout may be identical.
758 
759     This is not scalable in memory usage. Each processor requires O(Nglobal) size
760     array to compute these.
761 
762     Level: advanced
763 
764     Developer Note: The manual page states that idx and idxout may be identical but the calling
765        sequence declares idx as const so it cannot be the same as idxout.
766 
767     Concepts: mapping^global to local
768 
769 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
770           ISLocalToGlobalMappingDestroy()
771 @*/
772 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
773                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
774 {
775   PetscInt       i,*globals,nf = 0,tmp,start,end;
776   PetscErrorCode ierr;
777 
778   PetscFunctionBegin;
779   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
780   if (!mapping->globals) {
781     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
782   }
783   globals = mapping->globals;
784   start   = mapping->globalstart;
785   end     = mapping->globalend;
786 
787   if (type == IS_GTOLM_MASK) {
788     if (idxout) {
789       for (i=0; i<n; i++) {
790         if (idx[i] < 0) idxout[i] = idx[i];
791         else if (idx[i] < start) idxout[i] = -1;
792         else if (idx[i] > end)   idxout[i] = -1;
793         else                     idxout[i] = globals[idx[i] - start];
794       }
795     }
796     if (nout) *nout = n;
797   } else {
798     if (idxout) {
799       for (i=0; i<n; i++) {
800         if (idx[i] < 0) continue;
801         if (idx[i] < start) continue;
802         if (idx[i] > end) continue;
803         tmp = globals[idx[i] - start];
804         if (tmp < 0) continue;
805         idxout[nf++] = tmp;
806       }
807     } else {
808       for (i=0; i<n; i++) {
809         if (idx[i] < 0) continue;
810         if (idx[i] < start) continue;
811         if (idx[i] > end) continue;
812         tmp = globals[idx[i] - start];
813         if (tmp < 0) continue;
814         nf++;
815       }
816     }
817     if (nout) *nout = nf;
818   }
819   PetscFunctionReturn(0);
820 }
821 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
/*@C
    ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (mapping->info_cached) {
    /* a previous call already computed (and the mapping retained) the neighbor information; hand back the cache */
    *nproc = mapping->info_nproc;
    *procs = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices = mapping->info_indices;
  } else {
    /* first call: do the (collective) communication to build the neighbor information */
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
870 
871 #undef __FUNCT__
872 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - Does the parallel communication that
   builds the neighbor/shared-index information returned by
   ISLocalToGlobalMappingGetBlockInfo(), then caches the result on the mapping.

   Output Parameters (allocated here, ownership kept by the mapping cache):
     nproc    - number of processors sharing at least one index with this one
     procs    - ranks of those processors (this rank moved to slot 0 if present)
     numprocs - number of shared indices for each listed processor
     indices  - per processor, local positions of the shared indices, sorted by
                their global numbers

   Algorithm (two-round rendezvous):
     Round 1: global indices are dealt out in contiguous chunks of ~Ng/size to
     "global owners"; every rank sends each local index (as a (global,local) pair)
     to its global owner, which counts how many ranks reported each index.
     Round 2: for every index reported by more than one rank, the owner sends the
     full list of sharing ranks back to each reporter, which then assembles the
     per-neighbor output arrays.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* serial case: no neighbors; still allocate one-entry arrays so that the
       Restore routine and the cache work uniformly with the parallel case */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  /* three distinct tags so the three communication rounds cannot interfere */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  /* Ng = global number of indices = 1 + largest global index anywhere */
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* deal ownership of global indices out in contiguous chunks of size "scale";
     the last rank gets the (possibly smaller) remainder, clipped to at least 1 */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = #indices we send to rank p; nprocs[2*p+1] = 1 iff we send to p at all */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message is at most nmax (global index, local index) pairs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* starts[] was advanced while packing; rebuild the offsets for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* convert from integer count to pair count */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* starts[] offsets were consumed by the packing loop above; rebuild them */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* starts3[] = displacement of each round-2 message in recvs2; nt = total length */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here with new meaning: per-rank count of shared indices */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt; /* bprocs maps a rank to its slot in the output arrays */
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1282 
1283 #undef __FUNCT__
1284 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1285 /*@C
1286     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1287 
1288     Collective on ISLocalToGlobalMapping
1289 
1290     Input Parameters:
1291 .   mapping - the mapping from local to global indexing
1292 
1293     Output Parameter:
1294 +   nproc - number of processors that are connected to this one
1295 .   proc - neighboring processors
1296 .   numproc - number of indices for each processor
1297 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1298 
1299     Level: advanced
1300 
1301 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1302           ISLocalToGlobalMappingGetInfo()
1303 @*/
1304 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1305 {
1306   PetscErrorCode ierr;
1307 
1308   PetscFunctionBegin;
1309   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1310   if (mapping->info_free) {
1311     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1312     if (*indices) {
1313       PetscInt i;
1314 
1315       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1316       for (i=1; i<*nproc; i++) {
1317         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1318       }
1319       ierr = PetscFree(*indices);CHKERRQ(ierr);
1320     }
1321   }
1322   *nproc = 0;
1323   *procs = NULL;
1324   *numprocs = NULL;
1325   *indices = NULL;
1326   PetscFunctionReturn(0);
1327 }
1328 
1329 #undef __FUNCT__
1330 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1331 /*@C
1332     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1333      each index shared by more than one processor
1334 
1335     Collective on ISLocalToGlobalMapping
1336 
1337     Input Parameters:
1338 .   mapping - the mapping from local to global indexing
1339 
1340     Output Parameter:
1341 +   nproc - number of processors that are connected to this one
1342 .   proc - neighboring processors
1343 .   numproc - number of indices for each subdomain (processor)
1344 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1345 
1346     Level: advanced
1347 
1348     Concepts: mapping^local to global
1349 
1350     Fortran Usage:
1351 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1352 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1353           PetscInt indices[nproc][numprocmax],ierr)
1354         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1355         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1356 
1357 
1358 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1359           ISLocalToGlobalMappingRestoreInfo()
1360 @*/
1361 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1362 {
1363   PetscErrorCode ierr;
1364   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1365 
1366   PetscFunctionBegin;
1367   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1368   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1369   if (bs > 1) { /* we need to expand the cached info */
1370     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1371     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1372     for (i=0; i<*nproc; i++) {
1373       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1374       for (j=0; j<bnumprocs[i]; j++) {
1375         for (k=0; k<bs; k++) {
1376           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1377         }
1378       }
1379       (*numprocs)[i] = bnumprocs[i]*bs;
1380     }
1381     mapping->info_free = PETSC_TRUE;
1382   } else {
1383     *numprocs = bnumprocs;
1384     *indices  = bindices;
1385   }
1386   PetscFunctionReturn(0);
1387 }
1388 
1389 #undef __FUNCT__
1390 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1391 /*@C
1392     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1393 
1394     Collective on ISLocalToGlobalMapping
1395 
1396     Input Parameters:
1397 .   mapping - the mapping from local to global indexing
1398 
1399     Output Parameter:
1400 +   nproc - number of processors that are connected to this one
1401 .   proc - neighboring processors
1402 .   numproc - number of indices for each processor
1403 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1404 
1405     Level: advanced
1406 
1407 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1408           ISLocalToGlobalMappingGetInfo()
1409 @*/
1410 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1411 {
1412   PetscErrorCode ierr;
1413 
1414   PetscFunctionBegin;
1415   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1416   PetscFunctionReturn(0);
1417 }
1418 
1419 #undef __FUNCT__
1420 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1421 /*@C
1422    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1423 
1424    Not Collective
1425 
1426    Input Arguments:
1427 . ltog - local to global mapping
1428 
1429    Output Arguments:
1430 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1431 
1432    Level: advanced
1433 
1434    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1435 
1436 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1437 @*/
1438 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1439 {
1440   PetscFunctionBegin;
1441   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1442   PetscValidPointer(array,2);
1443   if (ltog->bs == 1) {
1444     *array = ltog->indices;
1445   } else {
1446     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1447     const PetscInt *ii;
1448     PetscErrorCode ierr;
1449 
1450     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1451     *array = jj;
1452     k    = 0;
1453     ii   = ltog->indices;
1454     for (i=0; i<n; i++)
1455       for (j=0; j<bs; j++)
1456         jj[k++] = bs*ii[i] + j;
1457   }
1458   PetscFunctionReturn(0);
1459 }
1460 
1461 #undef __FUNCT__
1462 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1463 /*@C
1464    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1465 
1466    Not Collective
1467 
1468    Input Arguments:
1469 + ltog - local to global mapping
1470 - array - array of indices
1471 
1472    Level: advanced
1473 
1474 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1475 @*/
1476 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1477 {
1478   PetscFunctionBegin;
1479   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1480   PetscValidPointer(array,2);
1481   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1482 
1483   if (ltog->bs > 1) {
1484     PetscErrorCode ierr;
1485     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1486   }
1487   PetscFunctionReturn(0);
1488 }
1489 
1490 #undef __FUNCT__
1491 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1492 /*@C
1493    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1494 
1495    Not Collective
1496 
1497    Input Arguments:
1498 . ltog - local to global mapping
1499 
1500    Output Arguments:
1501 . array - array of indices
1502 
1503    Level: advanced
1504 
1505 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1506 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand out the mapping's own block-index array; no copy is made, so the caller
     must return exactly this pointer via ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1515 
1516 #undef __FUNCT__
1517 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1518 /*@C
1519    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1520 
1521    Not Collective
1522 
1523    Input Arguments:
1524 + ltog - local to global mapping
1525 - array - array of indices
1526 
1527    Level: advanced
1528 
1529 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1530 @*/
1531 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1532 {
1533   PetscFunctionBegin;
1534   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1535   PetscValidPointer(array,2);
1536   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1537   *array = NULL;
1538   PetscFunctionReturn(0);
1539 }
1540 
1541 #undef __FUNCT__
1542 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1543 /*@C
1544    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1545 
1546    Not Collective
1547 
1548    Input Arguments:
1549 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1550 . n - number of mappings to concatenate
1551 - ltogs - local to global mappings
1552 
1553    Output Arguments:
1554 . ltogcat - new mapping
1555 
1556    Note: this currently always returns a mapping with block size of 1
1557 
1558    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1559 
1560    Level: advanced
1561 
1562 .seealso: ISLocalToGlobalMappingCreate()
1563 @*/
1564 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1565 {
1566   PetscInt       i,cnt,m,*idx;
1567   PetscErrorCode ierr;
1568 
1569   PetscFunctionBegin;
1570   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1571   if (n > 0) PetscValidPointer(ltogs,3);
1572   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1573   PetscValidPointer(ltogcat,4);
1574   for (cnt=0,i=0; i<n; i++) {
1575     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1576     cnt += m;
1577   }
1578   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1579   for (cnt=0,i=0; i<n; i++) {
1580     const PetscInt *subidx;
1581     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1582     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1583     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1584     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1585     cnt += m;
1586   }
1587   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1588   PetscFunctionReturn(0);
1589 }
1590 
1591 
1592