xref: /petsc/src/vec/is/utils/isltog.c (revision 8b7cb0e67edc0ad3c573fa4d3743f885f6250459)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
/* class id used to validate ISLocalToGlobalMapping object headers in this file */
PetscClassId IS_LTOGM_CLASSID;
/* forward declaration: computes the (uncached) block neighbor information; defined later in this file */
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 #undef __FUNCT__
10 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
11 /*@
12     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
13 
14     Not Collective
15 
16     Input Parameter:
17 .   ltog - local to global mapping
18 
19     Output Parameter:
20 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
21 
22     Level: advanced
23 
24     Concepts: mapping^local to global
25 
26 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
27 @*/
28 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
29 {
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
32   PetscValidIntPointer(n,2);
33   *n = mapping->bs*mapping->n;
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "ISLocalToGlobalMappingView"
39 /*@C
40     ISLocalToGlobalMappingView - View a local to global mapping
41 
42     Not Collective
43 
44     Input Parameters:
45 +   ltog - local to global mapping
46 -   viewer - viewer
47 
48     Level: advanced
49 
50     Concepts: mapping^local to global
51 
52 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
53 @*/
54 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
55 {
56   PetscInt       i;
57   PetscMPIInt    rank;
58   PetscBool      iascii;
59   PetscErrorCode ierr;
60 
61   PetscFunctionBegin;
62   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
63   if (!viewer) {
64     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
65   }
66   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
67 
68   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
69   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
70   if (iascii) {
71     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
72     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
73     for (i=0; i<mapping->n; i++) {
74       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
75     }
76     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
77     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
78   }
79   PetscFunctionReturn(0);
80 }
81 
82 #undef __FUNCT__
83 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
84 /*@
85     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
86     ordering and a global parallel ordering.
87 
88     Not collective
89 
90     Input Parameter:
91 .   is - index set containing the global numbers for each local number
92 
93     Output Parameter:
94 .   mapping - new mapping data structure
95 
96     Notes: the block size of the IS determines the block size of the mapping
97     Level: advanced
98 
99     Concepts: mapping^local to global
100 
101 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
102 @*/
103 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
104 {
105   PetscErrorCode ierr;
106   PetscInt       n,bs;
107   const PetscInt *indices;
108   MPI_Comm       comm;
109   PetscBool      isblock;
110 
111   PetscFunctionBegin;
112   PetscValidHeaderSpecific(is,IS_CLASSID,1);
113   PetscValidPointer(mapping,2);
114 
115   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
116   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
117   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
118   if (!isblock) {
119     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
121     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
122   } else {
123     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
124     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
126     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
127   }
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
133 /*@C
134     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
135     ordering and a global parallel ordering.
136 
137     Collective
138 
139     Input Parameter:
140 +   sf - star forest mapping contiguous local indices to (rank, offset)
141 -   start - first global index on this process
142 
143     Output Parameter:
144 .   mapping - new mapping data structure
145 
146     Level: advanced
147 
148     Concepts: mapping^local to global
149 
150 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
151 @*/
152 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
153 {
154   PetscErrorCode ierr;
155   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
156   const PetscInt *ilocal;
157   MPI_Comm       comm;
158 
159   PetscFunctionBegin;
160   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
161   PetscValidPointer(mapping,3);
162 
163   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
164   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
165   if (ilocal) {
166     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
167   }
168   else maxlocal = nleaves;
169   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
170   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
171   for (i=0; i<nroots; i++) globals[i] = start + i;
172   for (i=0; i<maxlocal; i++) ltog[i] = -1;
173   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
174   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
175   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
176   ierr = PetscFree(globals);CHKERRQ(ierr);
177   PetscFunctionReturn(0);
178 }
179 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingSetBlockSize"
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Notes: The total number of entries (old n times old bs) must be divisible by
    the new block size, and the existing indices must regroup into consistent
    blocks under the new size, otherwise an error is raised. Changing the block
    size rebuilds the block index array and resets the cached global range.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid,*oid;  /* new and old block index arrays */
  PetscInt       i,cn,on,obs,nn,cum,r,rbs,ri;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);  /* nothing to do */
  on  = mapping->n;        /* old number of block indices */
  obs = mapping->bs;       /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;       /* new number of block indices (same total entry count) */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
  if (bs < obs && obs%bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D",bs,obs);
  /* r/rbs/ri control the regrouping below: when shrinking (bs < obs) each old
     index expands into rbs = obs/bs new indices; when growing, ri = bs/obs
     consecutive old indices must merge into one new block */
  if (bs < obs) {
    r   = bs;
    rbs = obs/bs;
    ri  = 1;
  } else {
    r   = obs;
    rbs = 1;
    ri  = bs/obs;
  }
  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* walk the old indices, emitting new indices whenever a new-block boundary is
     reached; cn accumulates entries contributed by negative (unmapped) old indices */
  for (i=0,cum=0,cn=0;i<on;i++) {
    if (oid[i] < 0) cn += r;
    if ((cn && !(cn%bs)) || (oid[i] > -1 && !((oid[i]*obs)%bs))) {
      if (cum == nn) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices",bs,obs);
      if (cn && cn%bs) SETERRQ5(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices (neg = %D, rbs %D, r %D)",bs,obs,cn,rbs,r);
      if (cn) {
        PetscInt j;
        /* a completed run of unmapped entries becomes rbs unmapped new indices */
        for (j=0;j<rbs;j++) nid[cum++] = -1;
      } else {
        PetscInt j;
        /* when merging, the next ri-1 old indices must be consecutive with oid[i] */
        for (j=1;j<ri;j++) {
          if (i+j >= on) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices found",bs,obs);
          if (oid[i] != oid[i+j]-j) SETERRQ6(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D (%D,%D)",bs,obs,oid[i],oid[i+j],j,ri);
        }
        for (j=0;j<rbs;j++) nid[cum++] = (oid[i]*obs)/bs+j;
      }
      cn   = 0;
    }
  }
  /* the walk must have produced exactly nn indices with no leftover unmapped run */
  if (cum != nn || cn) {
    ierr = PetscFree(nid);CHKERRQ(ierr);
    SETERRQ6(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Incompatible block sizes %D and %D (new block indices found %D != %D, neg %D, rbs %D)",bs,obs,cum,nn,cn,rbs);
  }
  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  /* drop the lazily-built global-to-local table; it is rebuilt on demand */
  ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);
  mapping->globalstart = 0;
  mapping->globalend   = 0;
  PetscFunctionReturn(0);
}
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
258 /*@
259     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
260     ordering and a global parallel ordering.
261 
262     Not Collective
263 
264     Input Parameters:
265 .   mapping - mapping data structure
266 
267     Output Parameter:
268 .   bs - the blocksize
269 
270     Level: advanced
271 
272     Concepts: mapping^local to global
273 
274 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
275 @*/
276 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
277 {
278   PetscFunctionBegin;
279   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
280   *bs = mapping->bs;
281   PetscFunctionReturn(0);
282 }
283 
284 #undef __FUNCT__
285 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
286 /*@
287     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
288     ordering and a global parallel ordering.
289 
290     Not Collective, but communicator may have more than one process
291 
292     Input Parameters:
293 +   comm - MPI communicator
294 .   bs - the block size
295 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
296 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
297 -   mode - see PetscCopyMode
298 
299     Output Parameter:
300 .   mapping - new mapping data structure
301 
302     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
303     Level: advanced
304 
305     Concepts: mapping^local to global
306 
307 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
308 @*/
309 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
310 {
311   PetscErrorCode ierr;
312   PetscInt       *in;
313 
314   PetscFunctionBegin;
315   if (n) PetscValidIntPointer(indices,3);
316   PetscValidPointer(mapping,4);
317 
318   *mapping = NULL;
319   ierr = ISInitializePackage();CHKERRQ(ierr);
320 
321   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
322                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
323   (*mapping)->n             = n;
324   (*mapping)->bs            = bs;
325   (*mapping)->info_cached   = PETSC_FALSE;
326   (*mapping)->info_free     = PETSC_FALSE;
327   (*mapping)->info_procs    = NULL;
328   (*mapping)->info_numprocs = NULL;
329   (*mapping)->info_indices  = NULL;
330   /*
331     Do not create the global to local mapping. This is only created if
332     ISGlobalToLocalMapping() is called
333   */
334   (*mapping)->globals = 0;
335   if (mode == PETSC_COPY_VALUES) {
336     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
337     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
338     (*mapping)->indices = in;
339     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
340   } else if (mode == PETSC_OWN_POINTER) {
341     (*mapping)->indices = (PetscInt*)indices;
342     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
343   }
344   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
345   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
346   PetscFunctionReturn(0);
347 }
348 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* drop a reference; only the last reference actually frees the data */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* NOTE(review): entry 0 is freed before the loop over entries 1..info_nproc-1,
       which assumes each info_indices[i] is a separate allocation — confirm against
       how ISLocalToGlobalMappingGetBlockInfo_Private() fills these arrays */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
389 
390 #undef __FUNCT__
391 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
392 /*@
393     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
394     a new index set using the global numbering defined in an ISLocalToGlobalMapping
395     context.
396 
397     Not collective
398 
399     Input Parameters:
400 +   mapping - mapping between local and global numbering
401 -   is - index set in local numbering
402 
403     Output Parameters:
404 .   newis - index set in global numbering
405 
406     Level: advanced
407 
408     Concepts: mapping^local to global
409 
410 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
411           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
412 @*/
413 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
414 {
415   PetscErrorCode ierr;
416   PetscInt       n,*idxout;
417   const PetscInt *idxin;
418 
419   PetscFunctionBegin;
420   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
421   PetscValidHeaderSpecific(is,IS_CLASSID,2);
422   PetscValidPointer(newis,3);
423 
424   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
425   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
426   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
427   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
428   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
429   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
430   PetscFunctionReturn(0);
431 }
432 
433 #undef __FUNCT__
434 #define __FUNCT__ "ISLocalToGlobalMappingApply"
435 /*@
436    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
437    and converts them to the global numbering.
438 
439    Not collective
440 
441    Input Parameters:
442 +  mapping - the local to global mapping context
443 .  N - number of integers
444 -  in - input indices in local numbering
445 
446    Output Parameter:
447 .  out - indices in global numbering
448 
449    Notes:
450    The in and out array parameters may be identical.
451 
452    Level: advanced
453 
454 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
455           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
456           AOPetscToApplication(), ISGlobalToLocalMappingApply()
457 
458     Concepts: mapping^local to global
459 @*/
460 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
461 {
462   PetscInt i,bs,Nmax;
463 
464   PetscFunctionBegin;
465   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
466   bs   = mapping->bs;
467   Nmax = bs*mapping->n;
468   if (bs == 1) {
469     const PetscInt *idx = mapping->indices;
470     for (i=0; i<N; i++) {
471       if (in[i] < 0) {
472         out[i] = in[i];
473         continue;
474       }
475       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
476       out[i] = idx[in[i]];
477     }
478   } else {
479     const PetscInt *idx = mapping->indices;
480     for (i=0; i<N; i++) {
481       if (in[i] < 0) {
482         out[i] = in[i];
483         continue;
484       }
485       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
486       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
487     }
488   }
489   PetscFunctionReturn(0);
490 }
491 
492 #undef __FUNCT__
493 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
494 /*@
495    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
496 
497    Not collective
498 
499    Input Parameters:
500 +  mapping - the local to global mapping context
501 .  N - number of integers
502 -  in - input indices in local block numbering
503 
504    Output Parameter:
505 .  out - indices in global block numbering
506 
507    Notes:
508    The in and out array parameters may be identical.
509 
510    Example:
511      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
512      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
513 
514    Level: advanced
515 
516 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
517           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
518           AOPetscToApplication(), ISGlobalToLocalMappingApply()
519 
520     Concepts: mapping^local to global
521 @*/
522 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
523 {
524 
525   PetscFunctionBegin;
526   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
527   {
528     PetscInt i,Nmax = mapping->n;
529     const PetscInt *idx = mapping->indices;
530 
531     for (i=0; i<N; i++) {
532       if (in[i] < 0) {
533         out[i] = in[i];
534         continue;
535       }
536       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
537       out[i] = idx[in[i]];
538     }
539   }
540   PetscFunctionReturn(0);
541 }
542 
543 /* -----------------------------------------------------------------------------------------*/
544 
545 #undef __FUNCT__
546 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
547 /*
548     Creates the global fields in the ISLocalToGlobalMapping structure
549 */
550 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
551 {
552   PetscErrorCode ierr;
553   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
554 
555   PetscFunctionBegin;
556   end   = 0;
557   start = PETSC_MAX_INT;
558 
559   for (i=0; i<n; i++) {
560     if (idx[i] < 0) continue;
561     if (idx[i] < start) start = idx[i];
562     if (idx[i] > end)   end   = idx[i];
563   }
564   if (start > end) {start = 0; end = -1;}
565   mapping->globalstart = start;
566   mapping->globalend   = end;
567 
568   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
569   mapping->globals = globals;
570   for (i=0; i<end-start+1; i++) globals[i] = -1;
571   for (i=0; i<n; i++) {
572     if (idx[i] < 0) continue;
573     globals[idx[i] - start] = i;
574   }
575 
576   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
580 #undef __FUNCT__
581 #define __FUNCT__ "ISGlobalToLocalMappingApply"
582 /*@
583     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
584     specified with a global numbering.
585 
586     Not collective
587 
588     Input Parameters:
589 +   mapping - mapping between local and global numbering
590 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
591            IS_GTOLM_DROP - drops the indices with no local value from the output list
592 .   n - number of global indices to map
593 -   idx - global indices to map
594 
595     Output Parameters:
596 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
597 -   idxout - local index of each global index, one must pass in an array long enough
598              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
599              idxout == NULL to determine the required length (returned in nout)
600              and then allocate the required space and call ISGlobalToLocalMappingApply()
601              a second time to set the values.
602 
603     Notes:
604     Either nout or idxout may be NULL. idx and idxout may be identical.
605 
606     This is not scalable in memory usage. Each processor requires O(Nglobal) size
607     array to compute these.
608 
609     Level: advanced
610 
611     Developer Note: The manual page states that idx and idxout may be identical but the calling
612        sequence declares idx as const so it cannot be the same as idxout.
613 
614     Concepts: mapping^global to local
615 
616 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
617           ISLocalToGlobalMappingDestroy()
618 @*/
619 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
620                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
621 {
622   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
623   PetscErrorCode ierr;
624 
625   PetscFunctionBegin;
626   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
627   if (!mapping->globals) {
628     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
629   }
630   globals = mapping->globals;
631   start   = mapping->globalstart;
632   end     = mapping->globalend;
633   bs      = mapping->bs;
634 
635   if (type == IS_GTOLM_MASK) {
636     if (idxout) {
637       for (i=0; i<n; i++) {
638         if (idx[i] < 0)                   idxout[i] = idx[i];
639         else if (idx[i] < bs*start)       idxout[i] = -1;
640         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
641         else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
642       }
643     }
644     if (nout) *nout = n;
645   } else {
646     if (idxout) {
647       for (i=0; i<n; i++) {
648         if (idx[i] < 0) continue;
649         if (idx[i] < bs*start) continue;
650         if (idx[i] > bs*(end+1)-1) continue;
651         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
652         if (tmp < 0) continue;
653         idxout[nf++] = tmp;
654       }
655     } else {
656       for (i=0; i<n; i++) {
657         if (idx[i] < 0) continue;
658         if (idx[i] < bs*start) continue;
659         if (idx[i] > bs*(end+1)-1) continue;
660         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
661         if (tmp < 0) continue;
662         nf++;
663       }
664     }
665     if (nout) *nout = nf;
666   }
667   PetscFunctionReturn(0);
668 }
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
672 /*@
673     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
674     a new index set using the local numbering defined in an ISLocalToGlobalMapping
675     context.
676 
677     Not collective
678 
679     Input Parameters:
680 +   mapping - mapping between local and global numbering
681 -   is - index set in global numbering
682 
683     Output Parameters:
684 .   newis - index set in local numbering
685 
686     Level: advanced
687 
688     Concepts: mapping^local to global
689 
690 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
691           ISLocalToGlobalMappingDestroy()
692 @*/
693 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
694 {
695   PetscErrorCode ierr;
696   PetscInt       n,nout,*idxout;
697   const PetscInt *idxin;
698 
699   PetscFunctionBegin;
700   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
701   PetscValidHeaderSpecific(is,IS_CLASSID,3);
702   PetscValidPointer(newis,4);
703 
704   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
705   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
706   if (type == IS_GTOLM_MASK) {
707     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
708   } else {
709     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
710     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
711   }
712   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
713   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
714   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
715   PetscFunctionReturn(0);
716 }
717 
718 #undef __FUNCT__
719 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
720 /*@
721     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
722     specified with a block global numbering.
723 
724     Not collective
725 
726     Input Parameters:
727 +   mapping - mapping between local and global numbering
728 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
729            IS_GTOLM_DROP - drops the indices with no local value from the output list
730 .   n - number of global indices to map
731 -   idx - global indices to map
732 
733     Output Parameters:
734 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
735 -   idxout - local index of each global index, one must pass in an array long enough
736              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
737              idxout == NULL to determine the required length (returned in nout)
738              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
739              a second time to set the values.
740 
741     Notes:
742     Either nout or idxout may be NULL. idx and idxout may be identical.
743 
744     This is not scalable in memory usage. Each processor requires O(Nglobal) size
745     array to compute these.
746 
747     Level: advanced
748 
749     Developer Note: The manual page states that idx and idxout may be identical but the calling
750        sequence declares idx as const so it cannot be the same as idxout.
751 
752     Concepts: mapping^global to local
753 
754 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
755           ISLocalToGlobalMappingDestroy()
756 @*/
757 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
758                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
759 {
760   PetscInt       i,*globals,nf = 0,tmp,start,end;
761   PetscErrorCode ierr;
762 
763   PetscFunctionBegin;
764   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
765   if (!mapping->globals) {
766     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
767   }
768   globals = mapping->globals;
769   start   = mapping->globalstart;
770   end     = mapping->globalend;
771 
772   if (type == IS_GTOLM_MASK) {
773     if (idxout) {
774       for (i=0; i<n; i++) {
775         if (idx[i] < 0) idxout[i] = idx[i];
776         else if (idx[i] < start) idxout[i] = -1;
777         else if (idx[i] > end)   idxout[i] = -1;
778         else                     idxout[i] = globals[idx[i] - start];
779       }
780     }
781     if (nout) *nout = n;
782   } else {
783     if (idxout) {
784       for (i=0; i<n; i++) {
785         if (idx[i] < 0) continue;
786         if (idx[i] < start) continue;
787         if (idx[i] > end) continue;
788         tmp = globals[idx[i] - start];
789         if (tmp < 0) continue;
790         idxout[nf++] = tmp;
791       }
792     } else {
793       for (i=0; i<n; i++) {
794         if (idx[i] < 0) continue;
795         if (idx[i] < start) continue;
796         if (idx[i] > end) continue;
797         tmp = globals[idx[i] - start];
798         if (tmp < 0) continue;
799         nf++;
800       }
801     }
802     if (nout) *nout = nf;
803   }
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
809 /*@C
810     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
811      each index shared by more than one processor
812 
813     Collective on ISLocalToGlobalMapping
814 
815     Input Parameters:
816 .   mapping - the mapping from local to global indexing
817 
818     Output Parameter:
819 +   nproc - number of processors that are connected to this one
820 .   proc - neighboring processors
821 .   numproc - number of indices for each subdomain (processor)
822 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
823 
824     Level: advanced
825 
826     Concepts: mapping^local to global
827 
828     Fortran Usage:
829 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
830 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
831           PetscInt indices[nproc][numprocmax],ierr)
832         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
833         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
834 
835 
836 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
837           ISLocalToGlobalMappingRestoreInfo()
838 @*/
839 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
840 {
841   PetscErrorCode ierr;
842 
843   PetscFunctionBegin;
844   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
845   if (mapping->info_cached) {
846     *nproc = mapping->info_nproc;
847     *procs = mapping->info_procs;
848     *numprocs = mapping->info_numprocs;
849     *indices = mapping->info_indices;
850   } else {
851     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
852   }
853   PetscFunctionReturn(0);
854 }
855 
856 #undef __FUNCT__
857 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - Computes, via two rounds of message
   passing, which other processes share (block) indices with this one and which
   local indices are shared with each, then caches the result on the mapping.

   Round 1: every process sends each of its local global indices to a "global owner"
            chosen by an interval partition of the global index range.
   Round 2: each global owner determines which of its indices arrived from more than
            one process and sends back, to every contributor, the full list of
            sharing processes for each such index.

   All four outputs are also stored on the mapping (info_nproc/info_procs/
   info_numprocs/info_indices) with info_cached set, so later calls can reuse them.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors, but numprocs/indices still get one slot each so
       callers may read entry 0 even though *nproc is 0 */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = global number of indices = (largest index on any process) + 1 */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* interval partition of [0,Ng): this rank globally owns [rstart,rstart+ng);
     the last rank receives the (possibly shorter) remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message is a list of (global index, sender-local position) pairs,
     hence 2*nmax entries per receive slot */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop above advanced starts[]; rebuild it for use as send offsets */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2; /* convert integer count to pair count */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill loop above advanced starts[]; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* starts[] is rebuilt once more because the debug block above may have advanced it */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  /* second round reverses roles: we hear back from exactly the ranks we sent to (dest[]) */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* prefix-sum the received lengths into offsets; nt = total incoming data size */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here as a per-rank counter of shared indices */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt; /* bprocs maps an MPI rank to its slot in the compressed output lists */
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  /* (*numprocs) is zeroed and reused as a fill cursor while repopulating */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      /* swap entry i with entry 0 so this rank's own data comes first */
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1267 
1268 #undef __FUNCT__
1269 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1270 /*@C
1271     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1272 
1273     Collective on ISLocalToGlobalMapping
1274 
1275     Input Parameters:
1276 .   mapping - the mapping from local to global indexing
1277 
    Output Parameters:
1279 +   nproc - number of processors that are connected to this one
1280 .   proc - neighboring processors
1281 .   numproc - number of indices for each processor
1282 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1283 
1284     Level: advanced
1285 
1286 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetBlockInfo()
1288 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* The arrays are freed only when info_free is set, i.e. when
     ISLocalToGlobalMappingGetInfo() expanded the cached block info (bs > 1).
     Otherwise the pointers belong to the mapping's cache and stay alive
     until the mapping itself is destroyed. Note that *procs is never freed
     here: it is always the cached array, passed through unchanged. */
  if (mapping->info_free) {
    ierr = PetscFree(*numprocs);CHKERRQ(ierr);
    if (*indices) {
      PetscInt i;

      /* slot 0 is freed unconditionally, then the remaining *nproc-1 slots;
         NOTE(review): this appears to defend the case where slot 0 exists
         while *nproc is 0 (cf. the size == 1 branch of
         ISLocalToGlobalMappingGetBlockInfo_Private()) -- confirm */
      ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
      for (i=1; i<*nproc; i++) {
        ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
      }
      ierr = PetscFree(*indices);CHKERRQ(ierr);
    }
  }
  /* null out the caller's pointers so stale values cannot be reused */
  *nproc = 0;
  *procs = NULL;
  *numprocs = NULL;
  *indices = NULL;
  PetscFunctionReturn(0);
}
1313 
1314 #undef __FUNCT__
1315 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1316 /*@C
1317     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1318      each index shared by more than one processor
1319 
1320     Collective on ISLocalToGlobalMapping
1321 
1322     Input Parameters:
1323 .   mapping - the mapping from local to global indexing
1324 
    Output Parameters:
1326 +   nproc - number of processors that are connected to this one
1327 .   proc - neighboring processors
1328 .   numproc - number of indices for each subdomain (processor)
1329 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1330 
1331     Level: advanced
1332 
1333     Concepts: mapping^local to global
1334 
1335     Fortran Usage:
1336 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1337 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1338           PetscInt indices[nproc][numprocmax],ierr)
1339         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1340         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1341 
1342 
1343 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1344           ISLocalToGlobalMappingRestoreInfo()
1345 @*/
1346 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1347 {
1348   PetscErrorCode ierr;
1349   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1350 
1351   PetscFunctionBegin;
1352   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1353   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1354   if (bs > 1) { /* we need to expand the cached info */
1355     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1356     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1357     for (i=0; i<*nproc; i++) {
1358       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1359       for (j=0; j<bnumprocs[i]; j++) {
1360         for (k=0; k<bs; k++) {
1361           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1362         }
1363       }
1364       (*numprocs)[i] = bnumprocs[i]*bs;
1365     }
1366     mapping->info_free = PETSC_TRUE;
1367   } else {
1368     *numprocs = bnumprocs;
1369     *indices  = bindices;
1370   }
1371   PetscFunctionReturn(0);
1372 }
1373 
1374 #undef __FUNCT__
1375 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1376 /*@C
1377     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1378 
1379     Collective on ISLocalToGlobalMapping
1380 
1381     Input Parameters:
1382 .   mapping - the mapping from local to global indexing
1383 
    Output Parameters:
1385 +   nproc - number of processors that are connected to this one
1386 .   proc - neighboring processors
1387 .   numproc - number of indices for each processor
1388 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1389 
1390     Level: advanced
1391 
1392 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1393           ISLocalToGlobalMappingGetInfo()
1394 @*/
1395 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1396 {
1397   PetscErrorCode ierr;
1398 
1399   PetscFunctionBegin;
1400   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1401   PetscFunctionReturn(0);
1402 }
1403 
1404 #undef __FUNCT__
1405 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1406 /*@C
1407    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1408 
1409    Not Collective
1410 
1411    Input Arguments:
1412 . ltog - local to global mapping
1413 
1414    Output Arguments:
1415 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1416 
1417    Level: advanced
1418 
1419    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1420 
1421 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1422 @*/
1423 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1424 {
1425   PetscFunctionBegin;
1426   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1427   PetscValidPointer(array,2);
1428   if (ltog->bs == 1) {
1429     *array = ltog->indices;
1430   } else {
1431     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1432     const PetscInt *ii;
1433     PetscErrorCode ierr;
1434 
1435     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1436     *array = jj;
1437     k    = 0;
1438     ii   = ltog->indices;
1439     for (i=0; i<n; i++)
1440       for (j=0; j<bs; j++)
1441         jj[k++] = bs*ii[i] + j;
1442   }
1443   PetscFunctionReturn(0);
1444 }
1445 
1446 #undef __FUNCT__
1447 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1448 /*@C
1449    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1450 
1451    Not Collective
1452 
1453    Input Arguments:
1454 + ltog - local to global mapping
1455 - array - array of indices
1456 
1457    Level: advanced
1458 
1459 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1460 @*/
1461 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1462 {
1463   PetscFunctionBegin;
1464   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1465   PetscValidPointer(array,2);
1466   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1467 
1468   if (ltog->bs > 1) {
1469     PetscErrorCode ierr;
1470     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1471   }
1472   PetscFunctionReturn(0);
1473 }
1474 
1475 #undef __FUNCT__
1476 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1477 /*@C
1478    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1479 
1480    Not Collective
1481 
1482    Input Arguments:
1483 . ltog - local to global mapping
1484 
1485    Output Arguments:
1486 . array - array of indices
1487 
1488    Level: advanced
1489 
1490 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1491 @*/
1492 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1493 {
1494   PetscFunctionBegin;
1495   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1496   PetscValidPointer(array,2);
1497   *array = ltog->indices;
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 #undef __FUNCT__
1502 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1503 /*@C
1504    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1505 
1506    Not Collective
1507 
1508    Input Arguments:
1509 + ltog - local to global mapping
1510 - array - array of indices
1511 
1512    Level: advanced
1513 
1514 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1515 @*/
1516 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1517 {
1518   PetscFunctionBegin;
1519   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1520   PetscValidPointer(array,2);
1521   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1522   *array = NULL;
1523   PetscFunctionReturn(0);
1524 }
1525 
1526 #undef __FUNCT__
1527 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1528 /*@C
1529    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1530 
1531    Not Collective
1532 
1533    Input Arguments:
1534 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1535 . n - number of mappings to concatenate
1536 - ltogs - local to global mappings
1537 
1538    Output Arguments:
1539 . ltogcat - new mapping
1540 
1541    Note: this currently always returns a mapping with block size of 1
1542 
1543    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1544 
1545    Level: advanced
1546 
1547 .seealso: ISLocalToGlobalMappingCreate()
1548 @*/
1549 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1550 {
1551   PetscInt       i,cnt,m,*idx;
1552   PetscErrorCode ierr;
1553 
1554   PetscFunctionBegin;
1555   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1556   if (n > 0) PetscValidPointer(ltogs,3);
1557   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1558   PetscValidPointer(ltogcat,4);
1559   for (cnt=0,i=0; i<n; i++) {
1560     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1561     cnt += m;
1562   }
1563   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1564   for (cnt=0,i=0; i<n; i++) {
1565     const PetscInt *subidx;
1566     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1567     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1568     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1569     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1570     cnt += m;
1571   }
1572   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1573   PetscFunctionReturn(0);
1574 }
1575 
1576 
1577