xref: /petsc/src/vec/is/utils/isltog.c (revision 021e0df48735593a4b10a8d433fd5ac359e13c2e)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* number of (block) indices stored in the mapping, as set at creation */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
142   /*  if (!isblock) { */
143     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
144     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
145     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
146     /*  } else {
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150      }*/
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   *bs = mapping->bs;
227   PetscFunctionReturn(0);
228 }
229 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no local entries */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  /* make sure the IS package (and IS_LTOGM_CLASSID) is registered before creating the header */
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* adopt the caller's array; it will be freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
291 /*@
292    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
293    ordering and a global parallel ordering.
294 
295    Note Collective
296 
297    Input Parameters:
298 .  mapping - mapping data structure
299 
300    Level: advanced
301 
302 .seealso: ISLocalToGlobalMappingCreate()
303 @*/
304 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
305 {
306   PetscErrorCode ierr;
307 
308   PetscFunctionBegin;
309   if (!*mapping) PetscFunctionReturn(0);
310   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
311   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
312   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
313   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
314   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
315   *mapping = 0;
316   PetscFunctionReturn(0);
317 }
318 
319 #undef __FUNCT__
320 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
321 /*@
322     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
323     a new index set using the global numbering defined in an ISLocalToGlobalMapping
324     context.
325 
326     Not collective
327 
328     Input Parameters:
329 +   mapping - mapping between local and global numbering
330 -   is - index set in local numbering
331 
332     Output Parameters:
333 .   newis - index set in global numbering
334 
335     Level: advanced
336 
337     Concepts: mapping^local to global
338 
339 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
340           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
341 @*/
342 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
343 {
344   PetscErrorCode ierr;
345   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
346   const PetscInt *idxin;
347 
348   PetscFunctionBegin;
349   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
350   PetscValidHeaderSpecific(is,IS_CLASSID,2);
351   PetscValidPointer(newis,3);
352 
353   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
354   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
355   idxmap = mapping->indices;
356 
357   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
358   for (i=0; i<n; i++) {
359     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
360     idxout[i] = idxmap[idxin[i]];
361   }
362   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
363   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
364   PetscFunctionReturn(0);
365 }
366 
367 #undef __FUNCT__
368 #define __FUNCT__ "ISLocalToGlobalMappingApply"
369 /*@
370    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
371    and converts them to the global numbering.
372 
373    Not collective
374 
375    Input Parameters:
376 +  mapping - the local to global mapping context
377 .  N - number of integers
378 -  in - input indices in local numbering
379 
380    Output Parameter:
381 .  out - indices in global numbering
382 
383    Notes:
384    The in and out array parameters may be identical.
385 
386    Level: advanced
387 
388 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
389           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
390           AOPetscToApplication(), ISGlobalToLocalMappingApply()
391 
392     Concepts: mapping^local to global
393 @*/
394 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
395 {
396   PetscInt       i,Nmax = mapping->n,bs = mapping->bs;
397   const PetscInt *idx = mapping->indices;
398 
399   PetscFunctionBegin;
400   if (bs == 1) {
401     for (i=0; i<N; i++) {
402       if (in[i] < 0) {
403         out[i] = in[i];
404         continue;
405       }
406       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
407       out[i] = idx[in[i]];
408     }
409   } else {
410     for (i=0; i<N; i++) {
411       if (in[i] < 0) {
412         out[i] = in[i];
413         continue;
414       }
415       if (in[i] >= bs*Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
416       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
417     }
418   }
419   PetscFunctionReturn(0);
420 }
421 
422 #undef __FUNCT__
423 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
424 /*@
425    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local numbering  and converts them to the global numbering.
426 
427    Not collective
428 
429    Input Parameters:
430 +  mapping - the local to global mapping context
431 .  N - number of integers
432 -  in - input indices in local numbering
433 
434    Output Parameter:
435 .  out - indices in global numbering
436 
437    Notes:
438    The in and out array parameters may be identical.
439 
440    Level: advanced
441 
442 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
443           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
444           AOPetscToApplication(), ISGlobalToLocalMappingApply()
445 
446     Concepts: mapping^local to global
447 @*/
448 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
449 {
450   PetscInt       i,Nmax = mapping->n;
451   const PetscInt *idx = mapping->indices;
452 
453   PetscFunctionBegin;
454   for (i=0; i<N; i++) {
455     if (in[i] < 0) {
456       out[i] = in[i];
457       continue;
458     }
459     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i]*mapping->bs,Nmax,i);
460     out[i] = idx[in[i]];
461   }
462   PetscFunctionReturn(0);
463 }
464 
465 /* -----------------------------------------------------------------------------------------*/
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
469 /*
470     Creates the global fields in the ISLocalToGlobalMapping structure
471 */
472 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
473 {
474   PetscErrorCode ierr;
475   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
476 
477   PetscFunctionBegin;
478   end   = 0;
479   start = PETSC_MAX_INT;
480 
481   for (i=0; i<n; i++) {
482     if (idx[i] < 0) continue;
483     if (idx[i] < start) start = idx[i];
484     if (idx[i] > end)   end   = idx[i];
485   }
486   if (start > end) {start = 0; end = -1;}
487   mapping->globalstart = start;
488   mapping->globalend   = end;
489 
490   ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
491   mapping->globals = globals;
492   for (i=0; i<end-start+1; i++) globals[i] = -1;
493   for (i=0; i<n; i++) {
494     if (idx[i] < 0) continue;
495     globals[idx[i] - start] = i;
496   }
497 
498   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
499   PetscFunctionReturn(0);
500 }
501 
502 #undef __FUNCT__
503 #define __FUNCT__ "ISGlobalToLocalMappingApply"
504 /*@
505     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
506     specified with a global numbering.
507 
508     Not collective
509 
510     Input Parameters:
511 +   mapping - mapping between local and global numbering
512 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
513            IS_GTOLM_DROP - drops the indices with no local value from the output list
514 .   n - number of global indices to map
515 -   idx - global indices to map
516 
517     Output Parameters:
518 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
519 -   idxout - local index of each global index, one must pass in an array long enough
520              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
521              idxout == NULL to determine the required length (returned in nout)
522              and then allocate the required space and call ISGlobalToLocalMappingApply()
523              a second time to set the values.
524 
525     Notes:
526     Either nout or idxout may be NULL. idx and idxout may be identical.
527 
528     This is not scalable in memory usage. Each processor requires O(Nglobal) size
529     array to compute these.
530 
531     Level: advanced
532 
533     Developer Note: The manual page states that idx and idxout may be identical but the calling
534        sequence declares idx as const so it cannot be the same as idxout.
535 
536     Concepts: mapping^global to local
537 
538 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
539           ISLocalToGlobalMappingDestroy()
540 @*/
541 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
542                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
543 {
544   PetscInt       i,*globals,nf = 0,tmp,start,end;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin;
548   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
549   if (!mapping->globals) {
550     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
551   }
552   globals = mapping->globals;
553   start   = mapping->globalstart;
554   end     = mapping->globalend;
555 
556   if (type == IS_GTOLM_MASK) {
557     if (idxout) {
558       for (i=0; i<n; i++) {
559         if (idx[i] < 0) idxout[i] = idx[i];
560         else if (idx[i] < start) idxout[i] = -1;
561         else if (idx[i] > end)   idxout[i] = -1;
562         else                     idxout[i] = globals[idx[i] - start];
563       }
564     }
565     if (nout) *nout = n;
566   } else {
567     if (idxout) {
568       for (i=0; i<n; i++) {
569         if (idx[i] < 0) continue;
570         if (idx[i] < start) continue;
571         if (idx[i] > end) continue;
572         tmp = globals[idx[i] - start];
573         if (tmp < 0) continue;
574         idxout[nf++] = tmp;
575       }
576     } else {
577       for (i=0; i<n; i++) {
578         if (idx[i] < 0) continue;
579         if (idx[i] < start) continue;
580         if (idx[i] > end) continue;
581         tmp = globals[idx[i] - start];
582         if (tmp < 0) continue;
583         nf++;
584       }
585     }
586     if (nout) *nout = nf;
587   }
588   PetscFunctionReturn(0);
589 }
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
593 /*@C
594     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
595      each index shared by more than one processor
596 
597     Collective on ISLocalToGlobalMapping
598 
599     Input Parameters:
600 .   mapping - the mapping from local to global indexing
601 
602     Output Parameter:
603 +   nproc - number of processors that are connected to this one
604 .   proc - neighboring processors
605 .   numproc - number of indices for each subdomain (processor)
606 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
607 
608     Level: advanced
609 
610     Concepts: mapping^local to global
611 
612     Fortran Usage:
613 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
614 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
615           PetscInt indices[nproc][numprocmax],ierr)
616         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
617         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
618 
619 
620 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
621           ISLocalToGlobalMappingRestoreInfo()
622 @*/
623 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
624 {
625   PetscErrorCode ierr;
626   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
627   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
628   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
629   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
630   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
631   PetscInt       first_procs,first_numprocs,*first_indices;
632   MPI_Request    *recv_waits,*send_waits;
633   MPI_Status     recv_status,*send_status,*recv_statuses;
634   MPI_Comm       comm;
635   PetscBool      debug = PETSC_FALSE;
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
639   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
640   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
641   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
642   if (size == 1) {
643     *nproc         = 0;
644     *procs         = NULL;
645     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
646     (*numprocs)[0] = 0;
647     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
648     (*indices)[0]  = NULL;
649     PetscFunctionReturn(0);
650   }
651 
652   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
653 
654   /*
655     Notes on ISLocalToGlobalMappingGetInfo
656 
657     globally owned node - the nodes that have been assigned to this processor in global
658            numbering, just for this routine.
659 
660     nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)
662 
663     locally owned node - node that exists on this processors subdomain
664 
    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain owner)
667   */
668   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
669   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
670   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
671 
672   for (i=0; i<n; i++) {
673     if (lindices[i] > max) max = lindices[i];
674   }
675   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
676   Ng++;
677   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
678   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
679   scale  = Ng/size + 1;
680   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
681   rstart = scale*rank;
682 
683   /* determine ownership ranges of global indices */
684   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
685   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
686 
687   /* determine owners of each local node  */
688   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
689   for (i=0; i<n; i++) {
690     proc             = lindices[i]/scale; /* processor that globally owns this index */
691     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
692     owner[i]         = proc;
693     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
694   }
695   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
696   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
697 
698   /* inform other processors of number of messages and max length*/
699   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
700   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
701 
702   /* post receives for owned rows */
703   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
704   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
705   for (i=0; i<nrecvs; i++) {
706     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
707   }
708 
709   /* pack messages containing lists of local nodes to owners */
710   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
711   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
712   starts[0] = 0;
713   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
714   for (i=0; i<n; i++) {
715     sends[starts[owner[i]]++] = lindices[i];
716     sends[starts[owner[i]]++] = i;
717   }
718   ierr = PetscFree(owner);CHKERRQ(ierr);
719   starts[0] = 0;
720   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
721 
722   /* send the messages */
723   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
724   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
725   cnt = 0;
726   for (i=0; i<size; i++) {
727     if (nprocs[2*i]) {
728       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
729       dest[cnt] = i;
730       cnt++;
731     }
732   }
733   ierr = PetscFree(starts);CHKERRQ(ierr);
734 
735   /* wait on receives */
736   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
737   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
738   cnt  = nrecvs;
739   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
740   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
741   while (cnt) {
742     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
743     /* unpack receives into our local space */
744     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
745     source[imdex] = recv_status.MPI_SOURCE;
746     len[imdex]    = len[imdex]/2;
747     /* count how many local owners for each of my global owned indices */
748     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
749     cnt--;
750   }
751   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
752 
753   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
754   nowned  = 0;
755   nownedm = 0;
756   for (i=0; i<ng; i++) {
757     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
758   }
759 
760   /* create single array to contain rank of all local owners of each globally owned index */
761   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
762   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
763   starts[0] = 0;
764   for (i=1; i<ng; i++) {
765     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
766     else starts[i] = starts[i-1];
767   }
768 
  /* for each nontrivial globally owned node list all arriving processors */
770   for (i=0; i<nrecvs; i++) {
771     for (j=0; j<len[i]; j++) {
772       node = recvs[2*i*nmax+2*j]-rstart;
773       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
774     }
775   }
776 
777   if (debug) { /* -----------------------------------  */
778     starts[0] = 0;
779     for (i=1; i<ng; i++) {
780       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
781       else starts[i] = starts[i-1];
782     }
783     for (i=0; i<ng; i++) {
784       if (nownedsenders[i] > 1) {
785         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
786         for (j=0; j<nownedsenders[i]; j++) {
787           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
788         }
789         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
790       }
791     }
792     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
793   } /* -----------------------------------  */
794 
795   /* wait on original sends */
796   if (nsends) {
797     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
798     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
799     ierr = PetscFree(send_status);CHKERRQ(ierr);
800   }
801   ierr = PetscFree(send_waits);CHKERRQ(ierr);
802   ierr = PetscFree(sends);CHKERRQ(ierr);
803   ierr = PetscFree(nprocs);CHKERRQ(ierr);
804 
805   /* pack messages to send back to local owners */
806   starts[0] = 0;
807   for (i=1; i<ng; i++) {
808     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
809     else starts[i] = starts[i-1];
810   }
811   nsends2 = nrecvs;
812   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
813   for (i=0; i<nrecvs; i++) {
814     nprocs[i] = 1;
815     for (j=0; j<len[i]; j++) {
816       node = recvs[2*i*nmax+2*j]-rstart;
817       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
818     }
819   }
820   nt = 0;
821   for (i=0; i<nsends2; i++) nt += nprocs[i];
822 
823   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
824   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
825 
826   starts2[0] = 0;
827   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
828   /*
829      Each message is 1 + nprocs[i] long, and consists of
830        (0) the number of nodes being sent back
831        (1) the local node number,
832        (2) the number of processors sharing it,
833        (3) the processors sharing it
834   */
835   for (i=0; i<nsends2; i++) {
836     cnt = 1;
837     sends2[starts2[i]] = 0;
838     for (j=0; j<len[i]; j++) {
839       node = recvs[2*i*nmax+2*j]-rstart;
840       if (nownedsenders[node] > 1) {
841         sends2[starts2[i]]++;
842         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
843         sends2[starts2[i]+cnt++] = nownedsenders[node];
844         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
845         cnt += nownedsenders[node];
846       }
847     }
848   }
849 
850   /* receive the message lengths */
851   nrecvs2 = nsends;
852   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
853   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
854   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
855   for (i=0; i<nrecvs2; i++) {
856     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
857   }
858 
859   /* send the message lengths */
860   for (i=0; i<nsends2; i++) {
861     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
862   }
863 
864   /* wait on receives of lens */
865   if (nrecvs2) {
866     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
867     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
868     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
869   }
870   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
871 
872   starts3[0] = 0;
873   nt         = 0;
874   for (i=0; i<nrecvs2-1; i++) {
875     starts3[i+1] = starts3[i] + lens2[i];
876     nt          += lens2[i];
877   }
878   if (nrecvs2) nt += lens2[nrecvs2-1];
879 
880   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
881   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
882   for (i=0; i<nrecvs2; i++) {
883     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
884   }
885 
886   /* send the messages */
887   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
888   for (i=0; i<nsends2; i++) {
889     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
890   }
891 
892   /* wait on receives */
893   if (nrecvs2) {
894     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
895     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
896     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
897   }
898   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
899   ierr = PetscFree(nprocs);CHKERRQ(ierr);
900 
901   if (debug) { /* -----------------------------------  */
902     cnt = 0;
903     for (i=0; i<nrecvs2; i++) {
904       nt = recvs2[cnt++];
905       for (j=0; j<nt; j++) {
906         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
907         for (k=0; k<recvs2[cnt+1]; k++) {
908           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
909         }
910         cnt += 2 + recvs2[cnt+1];
911         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
912       }
913     }
914     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
915   } /* -----------------------------------  */
916 
917   /* count number subdomains for each local node */
918   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
919   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
920   cnt  = 0;
921   for (i=0; i<nrecvs2; i++) {
922     nt = recvs2[cnt++];
923     for (j=0; j<nt; j++) {
924       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
925       cnt += 2 + recvs2[cnt+1];
926     }
927   }
928   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
929   *nproc    = nt;
930   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
931   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
932   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
933   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
934   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
935   cnt       = 0;
936   for (i=0; i<size; i++) {
937     if (nprocs[i] > 0) {
938       bprocs[i]        = cnt;
939       (*procs)[cnt]    = i;
940       (*numprocs)[cnt] = nprocs[i];
941       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
942       cnt++;
943     }
944   }
945 
946   /* make the list of subdomains for each nontrivial local node */
947   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
948   cnt  = 0;
949   for (i=0; i<nrecvs2; i++) {
950     nt = recvs2[cnt++];
951     for (j=0; j<nt; j++) {
952       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
953       cnt += 2 + recvs2[cnt+1];
954     }
955   }
956   ierr = PetscFree(bprocs);CHKERRQ(ierr);
957   ierr = PetscFree(recvs2);CHKERRQ(ierr);
958 
959   /* sort the node indexing by their global numbers */
960   nt = *nproc;
961   for (i=0; i<nt; i++) {
962     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
963     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
964     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
965     ierr = PetscFree(tmp);CHKERRQ(ierr);
966   }
967 
968   if (debug) { /* -----------------------------------  */
969     nt = *nproc;
970     for (i=0; i<nt; i++) {
971       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
972       for (j=0; j<(*numprocs)[i]; j++) {
973         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
974       }
975       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
976     }
977     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
978   } /* -----------------------------------  */
979 
980   /* wait on sends */
981   if (nsends2) {
982     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
983     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
984     ierr = PetscFree(send_status);CHKERRQ(ierr);
985   }
986 
987   ierr = PetscFree(starts3);CHKERRQ(ierr);
988   ierr = PetscFree(dest);CHKERRQ(ierr);
989   ierr = PetscFree(send_waits);CHKERRQ(ierr);
990 
991   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
992   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
993   ierr = PetscFree(starts);CHKERRQ(ierr);
994   ierr = PetscFree(starts2);CHKERRQ(ierr);
995   ierr = PetscFree(lens2);CHKERRQ(ierr);
996 
997   ierr = PetscFree(source);CHKERRQ(ierr);
998   ierr = PetscFree(len);CHKERRQ(ierr);
999   ierr = PetscFree(recvs);CHKERRQ(ierr);
1000   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1001   ierr = PetscFree(sends2);CHKERRQ(ierr);
1002 
1003   /* put the information about myself as the first entry in the list */
1004   first_procs    = (*procs)[0];
1005   first_numprocs = (*numprocs)[0];
1006   first_indices  = (*indices)[0];
1007   for (i=0; i<*nproc; i++) {
1008     if ((*procs)[i] == rank) {
1009       (*procs)[0]    = (*procs)[i];
1010       (*numprocs)[0] = (*numprocs)[i];
1011       (*indices)[0]  = (*indices)[i];
1012       (*procs)[i]    = first_procs;
1013       (*numprocs)[i] = first_numprocs;
1014       (*indices)[i]  = first_indices;
1015       break;
1016     }
1017   }
1018   PetscFunctionReturn(0);
1019 }
1020 
1021 #undef __FUNCT__
1022 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1023 /*@C
1024     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1025 
1026     Collective on ISLocalToGlobalMapping
1027 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1036 
1037     Level: advanced
1038 
1039 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1040           ISLocalToGlobalMappingGetInfo()
1041 @*/
1042 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1043 {
1044   PetscErrorCode ierr;
1045   PetscInt       i;
1046 
1047   PetscFunctionBegin;
1048   ierr = PetscFree(*procs);CHKERRQ(ierr);
1049   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1050   if (*indices) {
1051     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1052     for (i=1; i<*nproc; i++) {
1053       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1054     }
1055     ierr = PetscFree(*indices);CHKERRQ(ierr);
1056   }
1057   PetscFunctionReturn(0);
1058 }
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1062 /*@C
1063    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1064 
1065    Not Collective
1066 
1067    Input Arguments:
1068 . ltog - local to global mapping
1069 
1070    Output Arguments:
1071 . array - array of indices
1072 
1073    Level: advanced
1074 
1075 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1076 @*/
1077 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1078 {
1079   PetscFunctionBegin;
1080   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1081   PetscValidPointer(array,2);
1082   if (ltog->bs == 1) {
1083     *array = ltog->indices;
1084   } else {
1085     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1086     const PetscInt *ii;
1087     PetscErrorCode ierr;
1088 
1089     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1090     *array = jj;
1091     k    = 0;
1092     ii   = ltog->indices;
1093     for (i=0; i<n; i++)
1094       for (j=0; j<bs; j++)
1095         jj[k++] = bs*ii[i] + j;
1096   }
1097   PetscFunctionReturn(0);
1098 }
1099 
1100 #undef __FUNCT__
1101 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1102 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1104 
1105    Not Collective
1106 
1107    Input Arguments:
1108 + ltog - local to global mapping
1109 - array - array of indices
1110 
1111    Level: advanced
1112 
1113 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1114 @*/
1115 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1116 {
1117   PetscFunctionBegin;
1118   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1119   PetscValidPointer(array,2);
1120   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1121 
1122   if (ltog->bs > 1) {
1123     PetscErrorCode ierr;
1124     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1125   }
1126   PetscFunctionReturn(0);
1127 }
1128 
1129 #undef __FUNCT__
1130 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1131 /*@C
1132    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1133 
1134    Not Collective
1135 
1136    Input Arguments:
1137 . ltog - local to global mapping
1138 
1139    Output Arguments:
1140 . array - array of indices
1141 
1142    Level: advanced
1143 
1144 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1145 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* borrowed reference to the mapping's internal block-index array: the
     caller must not free it and must hand it back via
     ISLocalToGlobalMappingRestoreBlockIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1154 
1155 #undef __FUNCT__
1156 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1157 /*@C
1158    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1159 
1160    Not Collective
1161 
1162    Input Arguments:
1163 + ltog - local to global mapping
1164 - array - array of indices
1165 
1166    Level: advanced
1167 
.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetBlockIndices()
1169 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the array is a borrowed reference to internal storage; verify the
     caller is returning the exact pointer it was given */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* null the caller's pointer so it cannot be used after restore */
  *array = NULL;
  PetscFunctionReturn(0);
}
1179 
1180 #undef __FUNCT__
1181 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1182 /*@C
1183    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1184 
1185    Not Collective
1186 
1187    Input Arguments:
1188 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1189 . n - number of mappings to concatenate
1190 - ltogs - local to global mappings
1191 
1192    Output Arguments:
1193 . ltogcat - new mapping
1194 
1195    Level: advanced
1196 
1197 .seealso: ISLocalToGlobalMappingCreate()
1198 @*/
1199 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1200 {
1201   PetscInt       i,cnt,m,*idx;
1202   PetscErrorCode ierr;
1203 
1204   PetscFunctionBegin;
1205   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1206   if (n > 0) PetscValidPointer(ltogs,3);
1207   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1208   PetscValidPointer(ltogcat,4);
1209   for (cnt=0,i=0; i<n; i++) {
1210     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1211     cnt += m;
1212   }
1213   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1214   for (cnt=0,i=0; i<n; i++) {
1215     const PetscInt *subidx;
1216     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1217     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1218     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1219     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1220     cnt += m;
1221   }
1222   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1223   PetscFunctionReturn(0);
1224 }
1225 
1226 
1227