xref: /petsc/src/vec/is/utils/isltog.c (revision 6658fb44ff488c2bcae81ef2c01cc0a1455ea879)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;  /* class id checked by the PetscValidHeaderSpecific() calls throughout this file */
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);  /* forward declaration; defined later in this file */
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISLocalToGlobalMappingDuplicate"
11 /*@
12     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
13 
14     Not Collective
15 
16     Input Parameter:
17 .   ltog - local to global mapping
18 
19     Output Parameter:
20 .   nltog - the duplicated local to global mapping
21 
22     Level: advanced
23 
24     Concepts: mapping^local to global
25 
26 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
27 @*/
28 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
29 {
30   PetscErrorCode ierr;
31 
32   PetscFunctionBegin;
33   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
34   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
35   PetscFunctionReturn(0);
36 }
37 
38 #undef __FUNCT__
39 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
40 /*@
41     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
42 
43     Not Collective
44 
45     Input Parameter:
46 .   mapping - local to global mapping
47 
48     Output Parameter:
49 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
50 
51     Level: advanced
52 
53     Concepts: mapping^local to global
54 
55 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
56 @*/
57 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
58 {
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
61   PetscValidIntPointer(n,2);
62   *n = mapping->bs*mapping->n;  /* mapping->n counts blocks, so the entry count is scaled by the block size */
63   PetscFunctionReturn(0);
64 }
65 
66 #undef __FUNCT__
67 #define __FUNCT__ "ISLocalToGlobalMappingView"
68 /*@C
69     ISLocalToGlobalMappingView - View a local to global mapping
70 
71     Not Collective
72 
73     Input Parameters:
74 +   mapping - local to global mapping
75 -   viewer - viewer (NULL selects the default ASCII viewer on the mapping's communicator)
76 
    Notes: only ASCII viewers are currently handled; other viewer types are silently ignored

77     Level: advanced
78 
79     Concepts: mapping^local to global
80 
81 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
82 @*/
83 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
84 {
85   PetscInt       i;
86   PetscMPIInt    rank;
87   PetscBool      iascii;
88   PetscErrorCode ierr;
89 
90   PetscFunctionBegin;
91   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
92   if (!viewer) {  /* default viewer: stdout on the mapping's communicator */
93     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
94   }
95   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
96 
97   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
98   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
99   if (iascii) {
100     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
101     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
102     for (i=0; i<mapping->n; i++) {  /* one line per stored (block) index: [rank] local global */
103       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
104     }
105     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
106     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
107   }
108   PetscFunctionReturn(0);
109 }
110 
111 #undef __FUNCT__
112 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
113 /*@
114     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
115     ordering and a global parallel ordering.
116 
117     Not collective
118 
119     Input Parameter:
120 .   is - index set containing the global numbers for each local number
121 
122     Output Parameter:
123 .   mapping - new mapping data structure
124 
125     Notes: the block size of the IS determines the block size of the mapping
126     Level: advanced
127 
128     Concepts: mapping^local to global
129 
130 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
131 @*/
132 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
133 {
134   PetscErrorCode ierr;
135   PetscInt       n,bs;
136   const PetscInt *indices;
137   MPI_Comm       comm;
138   PetscBool      isblock;
139 
140   PetscFunctionBegin;
141   PetscValidHeaderSpecific(is,IS_CLASSID,1);
142   PetscValidPointer(mapping,2);
143 
144   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
145   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
146   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
147   if (!isblock) {
148     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
149     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
150     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
151   } else {
152     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
153     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
154     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
155     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
156   }
157   PetscFunctionReturn(0);
158 }
159 
160 #undef __FUNCT__
161 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
162 /*@C
163     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
164     ordering and a global parallel ordering.
165 
166     Collective
167 
168     Input Parameter:
169 +   sf - star forest mapping contiguous local indices to (rank, offset)
170 -   start - first global index on this process
171 
172     Output Parameter:
173 .   mapping - new mapping data structure
174 
175     Level: advanced
176 
177     Concepts: mapping^local to global
178 
179 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
180 @*/
181 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
182 {
183   PetscErrorCode ierr;
184   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
185   const PetscInt *ilocal;
186   MPI_Comm       comm;
187 
188   PetscFunctionBegin;
189   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
190   PetscValidPointer(mapping,3);
191 
192   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
193   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
194   if (ilocal) {
195     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);  /* sparse leaves: local space extends one past the largest leaf index */
196   }
197   else maxlocal = nleaves;  /* NULL ilocal means the leaves are contiguous [0,nleaves) */
198   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
199   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
200   for (i=0; i<nroots; i++) globals[i] = start + i;  /* this process's roots get the contiguous global range [start,start+nroots) */
201   for (i=0; i<maxlocal; i++) ltog[i] = -1;  /* entries not reached by the broadcast below keep -1 (no global value) */
202   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
203   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
204   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);  /* ownership of ltog passes to the mapping */
205   ierr = PetscFree(globals);CHKERRQ(ierr);
206   PetscFunctionReturn(0);
207 }
208 
209 #undef __FUNCT__
210 #define __FUNCT__ "ISLocalToGlobalMappingSetBlockSize"
211 /*@
212     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
213 
214     Not collective
215 
216     Input Parameters:
217 .   mapping - mapping data structure
218 .   bs - the blocksize
219 
220     Level: advanced
221 
222     Concepts: mapping^local to global
223 
224 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
225 @*/
226 PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
227 {
228   PetscInt       *nid,*oid;
229   PetscInt       i,cn,on,obs,nn,cum,r,rbs,ri;
230   PetscErrorCode ierr;
231 
232   PetscFunctionBegin;
233   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
234   if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
235   if (bs == mapping->bs) PetscFunctionReturn(0);  /* nothing to do */
236   on  = mapping->n;        /* old number of blocks */
237   obs = mapping->bs;       /* old block size */
238   oid = mapping->indices;  /* old block indices */
239   nn  = (on*obs)/bs;       /* new number of blocks: total index count on*obs must be preserved */
240   if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);
241   if (bs < obs && obs%bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D",bs,obs);
242   if (bs < obs) {  /* refining: each old block splits into rbs new blocks */
243     r   = bs;
244     rbs = obs/bs;
245     ri  = 1;
246   } else {         /* coarsening: ri consecutive old blocks must merge into one new block */
247     r   = obs;
248     rbs = 1;
249     ri  = bs/obs;
250   }
251   ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
252   for (i=0,cum=0,cn=0;i<on;i++) {
253     if (oid[i] < 0) cn += r;  /* accumulate the length (in indices) of a run of negative (unmapped) entries */
254     if ((cn && !(cn%bs)) || (oid[i] > -1 && !((oid[i]*obs)%bs))) {  /* a negative run completes new block(s), or oid[i] starts on a new-block boundary */
255       if (cum == nn) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices",bs,obs);
256       if (cn && cn%bs) SETERRQ5(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices (neg = %D, rbs %D, r %D)",bs,obs,cn,rbs,r);
257       if (cn) {
258         PetscInt j;
259         for (j=0;j<rbs;j++) nid[cum++] = -1;  /* emit negative new blocks for the completed run */
260       } else {
261         PetscInt j;
262         for (j=1;j<ri;j++) {  /* coarsening requires the next ri old blocks to be consecutive */
263           if (i+j >= on) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices found",bs,obs);
264           if (oid[i] != oid[i+j]-j) SETERRQ6(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D (%D,%D)",bs,obs,oid[i],oid[i+j],j,ri);
265         }
266         for (j=0;j<rbs;j++) nid[cum++] = (oid[i]*obs)/bs+j;  /* emit the new block indices covering this old block */
267       }
268       cn   = 0;  /* reset the negative-run counter */
269     }
270   }
271   if (cum != nn || cn) {  /* did not emit exactly nn new blocks, or an incomplete negative run is left over */
272     ierr = PetscFree(nid);CHKERRQ(ierr);
273     SETERRQ6(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Incompatible block sizes %D and %D (new block indices found %D != %D, neg %D, rbs %D)",bs,obs,cum,nn,cn,rbs);
274   }
275   mapping->n           = nn;
276   mapping->bs          = bs;
277   ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
278   mapping->indices     = nid;
279   ierr                 = PetscFree(mapping->globals);CHKERRQ(ierr);  /* invalidate the cached global-to-local table; rebuilt lazily */
280   mapping->globalstart = 0;
281   mapping->globalend   = 0;
282   PetscFunctionReturn(0);
283 }
284 
285 #undef __FUNCT__
286 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
287 /*@
288     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
289     ordering and a global parallel ordering.
290 
291     Not Collective
292 
293     Input Parameters:
294 .   mapping - mapping data structure
295 
296     Output Parameter:
297 .   bs - the blocksize
298 
299     Level: advanced
300 
301     Concepts: mapping^local to global
302 
303 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
304 @*/
305 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
306 {
307   PetscFunctionBegin;
308   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
309   *bs = mapping->bs;
310   PetscFunctionReturn(0);
311 }
312 
313 #undef __FUNCT__
314 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
315 /*@
316     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
317     ordering and a global parallel ordering.
318 
319     Not Collective, but communicator may have more than one process
320 
321     Input Parameters:
322 +   comm - MPI communicator
323 .   bs - the block size
324 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
325 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
326 -   mode - see PetscCopyMode (PETSC_USE_POINTER is not supported)
327 
328     Output Parameter:
329 .   mapping - new mapping data structure
330 
331     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
332     Level: advanced
333 
334     Concepts: mapping^local to global
335 
336 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
337 @*/
338 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
339 {
340   PetscErrorCode ierr;
341   PetscInt       *in;
342 
343   PetscFunctionBegin;
344   if (n) PetscValidIntPointer(indices,3);  /* indices may be NULL only when there are no entries */
345   PetscValidPointer(mapping,4);
346 
347   *mapping = NULL;  /* give the output a defined value in case of early error */
348   ierr = ISInitializePackage();CHKERRQ(ierr);
349 
350   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
351                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
352   (*mapping)->n             = n;
353   (*mapping)->bs            = bs;
354   (*mapping)->info_cached   = PETSC_FALSE;  /* neighbor info (see ISLocalToGlobalMappingGetBlockInfo) is computed lazily */
355   (*mapping)->info_free     = PETSC_FALSE;
356   (*mapping)->info_procs    = NULL;
357   (*mapping)->info_numprocs = NULL;
358   (*mapping)->info_indices  = NULL;
359   /*
360     Do not create the global to local mapping. This is only created if
361     ISGlobalToLocalMapping() is called
362   */
363   (*mapping)->globals = 0;
364   if (mode == PETSC_COPY_VALUES) {
365     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
366     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
367     (*mapping)->indices = in;
368     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
369   } else if (mode == PETSC_OWN_POINTER) {
370     (*mapping)->indices = (PetscInt*)indices;  /* take ownership of the caller's array; freed in ISLocalToGlobalMappingDestroy() */
371     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
372   }
373   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
374   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);  /* type name shown when the object is viewed */
375   PetscFunctionReturn(0);
376 }
377 
378 #undef __FUNCT__
379 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
380 /*@
381    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
382    ordering and a global parallel ordering.
383 
384    Not Collective
385 
386    Input Parameters:
387 .  mapping - mapping data structure
388 
389    Level: advanced
390 
391 .seealso: ISLocalToGlobalMappingCreate()
392 @*/
393 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
394 {
395   PetscErrorCode ierr;
396 
397   PetscFunctionBegin;
398   if (!*mapping) PetscFunctionReturn(0);
399   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
400   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}  /* other references remain; just clear the caller's pointer */
401   ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
402   ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
403   ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
404   ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
405   if ((*mapping)->info_indices) {  /* free each cached per-process index array, then the array of pointers */
406     PetscInt i;
407 
408     ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
409     for (i=1; i<(*mapping)->info_nproc; i++) {
410       ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
411     }
412     ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
413   }
414   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
415   *mapping = 0;
416   PetscFunctionReturn(0);
417 }
418 
419 #undef __FUNCT__
420 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
421 /*@
422     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
423     a new index set using the global numbering defined in an ISLocalToGlobalMapping
424     context.
425 
426     Not collective
427 
428     Input Parameters:
429 +   mapping - mapping between local and global numbering
430 -   is - index set in local numbering
431 
432     Output Parameters:
433 .   newis - index set in global numbering
434 
435     Level: advanced
436 
437     Concepts: mapping^local to global
438 
439 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
440           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
441 @*/
442 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
443 {
444   PetscErrorCode ierr;
445   PetscInt       n,*idxout;
446   const PetscInt *idxin;
447 
448   PetscFunctionBegin;
449   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
450   PetscValidHeaderSpecific(is,IS_CLASSID,2);
451   PetscValidPointer(newis,3);
452 
453   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
454   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
455   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
456   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
457   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
458   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
459   PetscFunctionReturn(0);
460 }
461 
462 #undef __FUNCT__
463 #define __FUNCT__ "ISLocalToGlobalMappingApply"
464 /*@
465    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
466    and converts them to the global numbering.
467 
468    Not collective
469 
470    Input Parameters:
471 +  mapping - the local to global mapping context
472 .  N - number of integers
473 -  in - input indices in local numbering
474 
475    Output Parameter:
476 .  out - indices in global numbering
477 
478    Notes:
479    The in and out array parameters may be identical.
480 
481    Level: advanced
482 
483 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
484           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
485           AOPetscToApplication(), ISGlobalToLocalMappingApply()
486 
487     Concepts: mapping^local to global
488 @*/
489 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
490 {
491   PetscInt i,bs,Nmax;
492 
493   PetscFunctionBegin;
494   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
495   bs   = mapping->bs;
496   Nmax = bs*mapping->n;
497   if (bs == 1) {
498     const PetscInt *idx = mapping->indices;
499     for (i=0; i<N; i++) {
500       if (in[i] < 0) {
501         out[i] = in[i];
502         continue;
503       }
504       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
505       out[i] = idx[in[i]];
506     }
507   } else {
508     const PetscInt *idx = mapping->indices;
509     for (i=0; i<N; i++) {
510       if (in[i] < 0) {
511         out[i] = in[i];
512         continue;
513       }
514       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
515       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
516     }
517   }
518   PetscFunctionReturn(0);
519 }
520 
521 #undef __FUNCT__
522 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
523 /*@
524    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
525 
526    Not collective
527 
528    Input Parameters:
529 +  mapping - the local to global mapping context
530 .  N - number of integers
531 -  in - input indices in local block numbering
532 
533    Output Parameter:
534 .  out - indices in global block numbering
535 
536    Notes:
537    The in and out array parameters may be identical.
538 
539    Example:
540      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
541      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
542 
543    Level: advanced
544 
545 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
546           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
547           AOPetscToApplication(), ISGlobalToLocalMappingApply()
548 
549     Concepts: mapping^local to global
550 @*/
551 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
552 {
553 
554   PetscFunctionBegin;
555   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
556   {
557     PetscInt i,Nmax = mapping->n;
558     const PetscInt *idx = mapping->indices;
559 
560     for (i=0; i<N; i++) {
561       if (in[i] < 0) {
562         out[i] = in[i];
563         continue;
564       }
565       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
566       out[i] = idx[in[i]];
567     }
568   }
569   PetscFunctionReturn(0);
570 }
571 
572 /* -----------------------------------------------------------------------------------------*/
573 
574 #undef __FUNCT__
575 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
576 /*
577     Creates the global fields in the ISLocalToGlobalMapping structure
578 */
579 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
580 {
581   PetscErrorCode ierr;
582   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
583 
584   PetscFunctionBegin;
585   end   = 0;
586   start = PETSC_MAX_INT;
587 
588   for (i=0; i<n; i++) {
589     if (idx[i] < 0) continue;
590     if (idx[i] < start) start = idx[i];
591     if (idx[i] > end)   end   = idx[i];
592   }
593   if (start > end) {start = 0; end = -1;}
594   mapping->globalstart = start;
595   mapping->globalend   = end;
596 
597   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
598   mapping->globals = globals;
599   for (i=0; i<end-start+1; i++) globals[i] = -1;
600   for (i=0; i<n; i++) {
601     if (idx[i] < 0) continue;
602     globals[idx[i] - start] = i;
603   }
604 
605   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
606   PetscFunctionReturn(0);
607 }
608 
609 #undef __FUNCT__
610 #define __FUNCT__ "ISGlobalToLocalMappingApply"
611 /*@
612     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
613     specified with a global numbering.
614 
615     Not collective
616 
617     Input Parameters:
618 +   mapping - mapping between local and global numbering
619 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
620            IS_GTOLM_DROP - drops the indices with no local value from the output list
621 .   n - number of global indices to map
622 -   idx - global indices to map
623 
624     Output Parameters:
625 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
626 -   idxout - local index of each global index, one must pass in an array long enough
627              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
628              idxout == NULL to determine the required length (returned in nout)
629              and then allocate the required space and call ISGlobalToLocalMappingApply()
630              a second time to set the values.
631 
632     Notes:
633     Either nout or idxout may be NULL. idx and idxout may be identical.
634 
635     This is not scalable in memory usage. Each processor requires O(Nglobal) size
636     array to compute these.
637 
638     Level: advanced
639 
640     Developer Note: The manual page states that idx and idxout may be identical but the calling
641        sequence declares idx as const so it cannot be the same as idxout.
642 
643     Concepts: mapping^global to local
644 
645 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
646           ISLocalToGlobalMappingDestroy()
647 @*/
648 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
649                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
650 {
651   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
652   PetscErrorCode ierr;
653 
654   PetscFunctionBegin;
655   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
656   if (!mapping->globals) {  /* build the dense global-to-local lookup table lazily */
657     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
658   }
659   globals = mapping->globals;
660   start   = mapping->globalstart;  /* start/end bound the known global range in block units */
661   end     = mapping->globalend;
662   bs      = mapping->bs;
663 
664   if (type == IS_GTOLM_MASK) {
665     if (idxout) {
666       for (i=0; i<n; i++) {
667         if (idx[i] < 0)                   idxout[i] = idx[i];  /* negative entries pass through unchanged */
668         else if (idx[i] < bs*start)       idxout[i] = -1;      /* outside the known global range: mask with -1 */
669         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
670         else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);  /* map the block, keep the intra-block offset */
671       }
672     }
673     if (nout) *nout = n;  /* MASK never changes the length */
674   } else {
675     if (idxout) {
676       for (i=0; i<n; i++) {
677         if (idx[i] < 0) continue;
678         if (idx[i] < bs*start) continue;
679         if (idx[i] > bs*(end+1)-1) continue;
680         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
681         if (tmp < 0) continue;  /* inside the range but with no local value (globals entry -1): drop */
682         idxout[nf++] = tmp;
683       }
684     } else {  /* no output array: counting pass only */
685       for (i=0; i<n; i++) {
686         if (idx[i] < 0) continue;
687         if (idx[i] < bs*start) continue;
688         if (idx[i] > bs*(end+1)-1) continue;
689         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
690         if (tmp < 0) continue;
691         nf++;
692       }
693     }
694     if (nout) *nout = nf;
695   }
696   PetscFunctionReturn(0);
697 }
698 
699 #undef __FUNCT__
700 #define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
701 /*@
702     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
703     a new index set using the local numbering defined in an ISLocalToGlobalMapping
704     context.
705 
706     Not collective
707 
708     Input Parameters:
709 +   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
710 -   is - index set in global numbering
711 
712     Output Parameters:
713 .   newis - index set in local numbering
714 
715     Level: advanced
716 
717     Concepts: mapping^local to global
718 
719 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
720           ISLocalToGlobalMappingDestroy()
721 @*/
722 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
723 {
724   PetscErrorCode ierr;
725   PetscInt       n,nout,*idxout;
726   const PetscInt *idxin;
727 
728   PetscFunctionBegin;
729   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
730   PetscValidHeaderSpecific(is,IS_CLASSID,3);
731   PetscValidPointer(newis,4);
732 
733   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
734   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
735   if (type == IS_GTOLM_MASK) {
736     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);  /* IS_GTOLM_MASK produces exactly n entries */
737   } else {
738     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);  /* first call with NULL output just counts the surviving indices */
739     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
740   }
741   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
742   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
743   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);  /* idxout ownership passes to the new IS */
744   PetscFunctionReturn(0);
745 }
746 
747 #undef __FUNCT__
748 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
749 /*@
750     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
751     specified with a block global numbering.
752 
753     Not collective
754 
755     Input Parameters:
756 +   mapping - mapping between local and global numbering
757 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
758            IS_GTOLM_DROP - drops the indices with no local value from the output list
759 .   n - number of global indices to map
760 -   idx - global indices to map
761 
762     Output Parameters:
763 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
764 -   idxout - local index of each global index, one must pass in an array long enough
765              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
766              idxout == NULL to determine the required length (returned in nout)
767              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
768              a second time to set the values.
769 
770     Notes:
771     Either nout or idxout may be NULL. idx and idxout may be identical.
772 
773     This is not scalable in memory usage. Each processor requires O(Nglobal) size
774     array to compute these.
775 
776     Level: advanced
777 
778     Developer Note: The manual page states that idx and idxout may be identical but the calling
779        sequence declares idx as const so it cannot be the same as idxout.
780 
781     Concepts: mapping^global to local
782 
783 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
784           ISLocalToGlobalMappingDestroy()
785 @*/
786 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
787                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
788 {
789   PetscInt       i,*globals,nf = 0,tmp,start,end;
790   PetscErrorCode ierr;
791 
792   PetscFunctionBegin;
793   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
794   if (!mapping->globals) {  /* build the dense global-to-local lookup table lazily */
795     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
796   }
797   globals = mapping->globals;  /* globals[g - start] is the local block for global block g, or -1 */
798   start   = mapping->globalstart;
799   end     = mapping->globalend;
800 
801   if (type == IS_GTOLM_MASK) {
802     if (idxout) {
803       for (i=0; i<n; i++) {
804         if (idx[i] < 0) idxout[i] = idx[i];  /* negative entries pass through unchanged */
805         else if (idx[i] < start) idxout[i] = -1;  /* outside the known global range: mask with -1 */
806         else if (idx[i] > end)   idxout[i] = -1;
807         else                     idxout[i] = globals[idx[i] - start];
808       }
809     }
810     if (nout) *nout = n;  /* MASK never changes the length */
811   } else {
812     if (idxout) {
813       for (i=0; i<n; i++) {
814         if (idx[i] < 0) continue;
815         if (idx[i] < start) continue;
816         if (idx[i] > end) continue;
817         tmp = globals[idx[i] - start];
818         if (tmp < 0) continue;  /* inside the range but with no local value: drop */
819         idxout[nf++] = tmp;
820       }
821     } else {  /* no output array: counting pass only */
822       for (i=0; i<n; i++) {
823         if (idx[i] < 0) continue;
824         if (idx[i] < start) continue;
825         if (idx[i] > end) continue;
826         tmp = globals[idx[i] - start];
827         if (tmp < 0) continue;
828         nf++;
829       }
830     }
831     if (nout) *nout = nf;
832   }
833   PetscFunctionReturn(0);
834 }
835 
836 #undef __FUNCT__
837 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
838 /*@C
839     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
840      each index shared by more than one processor
841 
842     Collective on ISLocalToGlobalMapping
843 
844     Input Parameters:
845 .   mapping - the mapping from local to global indexing
846 
847     Output Parameter:
848 +   nproc - number of processors that are connected to this one
849 .   proc - neighboring processors
850 .   numproc - number of indices for each subdomain (processor)
851 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
852 
853     Level: advanced
854 
855     Concepts: mapping^local to global
856 
857     Fortran Usage:
858 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
859 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
860           PetscInt indices[nproc][numprocmax],ierr)
861         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
862         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
863 
864 
865 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
866           ISLocalToGlobalMappingRestoreInfo()
867 @*/
868 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
869 {
870   PetscErrorCode ierr;
871 
872   PetscFunctionBegin;
873   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
874   if (mapping->info_cached) {
875     *nproc = mapping->info_nproc;
876     *procs = mapping->info_procs;
877     *numprocs = mapping->info_numprocs;
878     *indices = mapping->info_indices;
879   } else {
880     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
881   }
882   PetscFunctionReturn(0);
883 }
884 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
/*
   Computes which other MPI ranks share each of this rank's local (block) indices,
   using a three-phase rendezvous over a temporary partition of the global index space:

     Phase 1 (tag1): the global range [0,Ng) is split into slabs of size
        scale = Ng/size + 1; rank r temporarily "globally owns" [r*scale,(r+1)*scale).
        Every rank sends each of its local indices (paired with its local position)
        to the slab owner.
     Phase 2: each slab owner counts, per owned index, how many ranks sent it.
        Indices with more than one sender are "nontrivial" (on a subdomain boundary);
        the list of sending ranks for each is collected in ownedsenders[].
     Phase 3 (tag2 = message lengths, tag3 = data): owners reply to each sender with,
        for every nontrivial index the sender contributed, the local position, the
        sharer count, and the list of sharing ranks.
     Finally the replies are regrouped per neighboring rank into (*procs, *numprocs,
     *indices), each index list is sorted by global index, this rank's own entry is
     swapped to slot 0, and everything is cached on the mapping.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; allocate one-entry arrays so RestoreBlockInfo/Destroy can free uniformly */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + the largest global index on any rank (reduction below) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  scale  = Ng/size + 1;
  /* last rank gets the (possibly short) remainder slab; ng clamped to at least 1 */
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs is interleaved per rank p: nprocs[2p] = #indices sent to p, nprocs[2p+1] = 0/1 flag "p gets a message" */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* each incoming message is up to nmax (index,position) pairs, i.e. 2*nmax ints, at fixed stride 2*nmax in recvs */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    /* each entry is the pair (global index, local position); packing destroys starts[], rebuilt below */
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  /* source[i]/len[i] record, per received message, the sending rank and its pair count */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrival globally owned node list all arriving processors */
  /* this consumes starts[] again (post-increment); it is recomputed before each later reuse */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* phase-3 replies mirror the phase-1 receives: one reply per message received */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  /* blocking sends are safe here: the matching Irecvs are already posted above */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here with a new meaning: per-rank count of shared indices */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;   /* rank -> compressed slot in the output arrays */
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  /* (*numprocs) is zeroed and reused as a fill cursor; it ends back at the counts above */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  /* NOTE(review): if *nproc == 0 these read the single uninitialized entry of the
     length-(nt+1) allocations above; the swap loop below then never matches, so the
     garbage values are never used -- but it relies on that, worth confirming */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1296 
1297 #undef __FUNCT__
1298 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1299 /*@C
1300     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1301 
1302     Collective on ISLocalToGlobalMapping
1303 
1304     Input Parameters:
1305 .   mapping - the mapping from local to global indexing
1306 
1307     Output Parameter:
1308 +   nproc - number of processors that are connected to this one
1309 .   proc - neighboring processors
1310 .   numproc - number of indices for each processor
1311 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1312 
1313     Level: advanced
1314 
1315 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1316           ISLocalToGlobalMappingGetInfo()
1317 @*/
1318 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1319 {
1320   PetscErrorCode ierr;
1321 
1322   PetscFunctionBegin;
1323   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1324   if (mapping->info_free) {
1325     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1326     if (*indices) {
1327       PetscInt i;
1328 
1329       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1330       for (i=1; i<*nproc; i++) {
1331         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1332       }
1333       ierr = PetscFree(*indices);CHKERRQ(ierr);
1334     }
1335   }
1336   *nproc = 0;
1337   *procs = NULL;
1338   *numprocs = NULL;
1339   *indices = NULL;
1340   PetscFunctionReturn(0);
1341 }
1342 
1343 #undef __FUNCT__
1344 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1345 /*@C
1346     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1347      each index shared by more than one processor
1348 
1349     Collective on ISLocalToGlobalMapping
1350 
1351     Input Parameters:
1352 .   mapping - the mapping from local to global indexing
1353 
1354     Output Parameter:
1355 +   nproc - number of processors that are connected to this one
1356 .   proc - neighboring processors
1357 .   numproc - number of indices for each subdomain (processor)
1358 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1359 
1360     Level: advanced
1361 
1362     Concepts: mapping^local to global
1363 
1364     Fortran Usage:
1365 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1366 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1367           PetscInt indices[nproc][numprocmax],ierr)
1368         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1369         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1370 
1371 
1372 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1373           ISLocalToGlobalMappingRestoreInfo()
1374 @*/
1375 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1376 {
1377   PetscErrorCode ierr;
1378   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1379 
1380   PetscFunctionBegin;
1381   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1382   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1383   if (bs > 1) { /* we need to expand the cached info */
1384     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1385     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1386     for (i=0; i<*nproc; i++) {
1387       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1388       for (j=0; j<bnumprocs[i]; j++) {
1389         for (k=0; k<bs; k++) {
1390           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1391         }
1392       }
1393       (*numprocs)[i] = bnumprocs[i]*bs;
1394     }
1395     mapping->info_free = PETSC_TRUE;
1396   } else {
1397     *numprocs = bnumprocs;
1398     *indices  = bindices;
1399   }
1400   PetscFunctionReturn(0);
1401 }
1402 
1403 #undef __FUNCT__
1404 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1405 /*@C
1406     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1407 
1408     Collective on ISLocalToGlobalMapping
1409 
1410     Input Parameters:
1411 .   mapping - the mapping from local to global indexing
1412 
1413     Output Parameter:
1414 +   nproc - number of processors that are connected to this one
1415 .   proc - neighboring processors
1416 .   numproc - number of indices for each processor
1417 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1418 
1419     Level: advanced
1420 
1421 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1422           ISLocalToGlobalMappingGetInfo()
1423 @*/
1424 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1425 {
1426   PetscErrorCode ierr;
1427 
1428   PetscFunctionBegin;
1429   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1430   PetscFunctionReturn(0);
1431 }
1432 
1433 #undef __FUNCT__
1434 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1435 /*@C
1436    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1437 
1438    Not Collective
1439 
1440    Input Arguments:
1441 . ltog - local to global mapping
1442 
1443    Output Arguments:
1444 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1445 
1446    Level: advanced
1447 
1448    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1449 
1450 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1451 @*/
1452 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1453 {
1454   PetscFunctionBegin;
1455   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1456   PetscValidPointer(array,2);
1457   if (ltog->bs == 1) {
1458     *array = ltog->indices;
1459   } else {
1460     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1461     const PetscInt *ii;
1462     PetscErrorCode ierr;
1463 
1464     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1465     *array = jj;
1466     k    = 0;
1467     ii   = ltog->indices;
1468     for (i=0; i<n; i++)
1469       for (j=0; j<bs; j++)
1470         jj[k++] = bs*ii[i] + j;
1471   }
1472   PetscFunctionReturn(0);
1473 }
1474 
1475 #undef __FUNCT__
1476 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1477 /*@C
1478    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1479 
1480    Not Collective
1481 
1482    Input Arguments:
1483 + ltog - local to global mapping
1484 - array - array of indices
1485 
1486    Level: advanced
1487 
1488 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1489 @*/
1490 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1491 {
1492   PetscFunctionBegin;
1493   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1494   PetscValidPointer(array,2);
1495   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1496 
1497   if (ltog->bs > 1) {
1498     PetscErrorCode ierr;
1499     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1500   }
1501   PetscFunctionReturn(0);
1502 }
1503 
1504 #undef __FUNCT__
1505 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1506 /*@C
1507    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1508 
1509    Not Collective
1510 
1511    Input Arguments:
1512 . ltog - local to global mapping
1513 
1514    Output Arguments:
1515 . array - array of indices
1516 
1517    Level: advanced
1518 
1519 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1520 @*/
1521 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1522 {
1523   PetscFunctionBegin;
1524   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1525   PetscValidPointer(array,2);
1526   *array = ltog->indices;
1527   PetscFunctionReturn(0);
1528 }
1529 
1530 #undef __FUNCT__
1531 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1532 /*@C
1533    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1534 
1535    Not Collective
1536 
1537    Input Arguments:
1538 + ltog - local to global mapping
1539 - array - array of indices
1540 
1541    Level: advanced
1542 
1543 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1544 @*/
1545 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1546 {
1547   PetscFunctionBegin;
1548   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1549   PetscValidPointer(array,2);
1550   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1551   *array = NULL;
1552   PetscFunctionReturn(0);
1553 }
1554 
1555 #undef __FUNCT__
1556 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1557 /*@C
1558    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1559 
1560    Not Collective
1561 
1562    Input Arguments:
1563 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1564 . n - number of mappings to concatenate
1565 - ltogs - local to global mappings
1566 
1567    Output Arguments:
1568 . ltogcat - new mapping
1569 
1570    Note: this currently always returns a mapping with block size of 1
1571 
1572    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1573 
1574    Level: advanced
1575 
1576 .seealso: ISLocalToGlobalMappingCreate()
1577 @*/
1578 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1579 {
1580   PetscInt       i,cnt,m,*idx;
1581   PetscErrorCode ierr;
1582 
1583   PetscFunctionBegin;
1584   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1585   if (n > 0) PetscValidPointer(ltogs,3);
1586   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1587   PetscValidPointer(ltogcat,4);
1588   for (cnt=0,i=0; i<n; i++) {
1589     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1590     cnt += m;
1591   }
1592   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1593   for (cnt=0,i=0; i<n; i++) {
1594     const PetscInt *subidx;
1595     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1596     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1597     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1598     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1599     cnt += m;
1600   }
1601   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1602   PetscFunctionReturn(0);
1603 }
1604 
1605 
1606