xref: /petsc/src/vec/is/utils/isltog.c (revision 7afc1a8b0cbfcb157f2be8aa81c6cdb9b187c271)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@C
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
95     for (i=0; i<mapping->n; i++) {
96       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
97     }
98     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
99     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
100   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
101   PetscFunctionReturn(0);
102 }
103 
104 #undef __FUNCT__
105 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
106 /*@
107     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
108     ordering and a global parallel ordering.
109 
110     Not collective
111 
112     Input Parameter:
113 .   is - index set containing the global numbers for each local number
114 
115     Output Parameter:
116 .   mapping - new mapping data structure
117 
118     Level: advanced
119 
120     Concepts: mapping^local to global
121 
122 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
123 @*/
124 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
125 {
126   PetscErrorCode ierr;
127   PetscInt       n,bs;
128   const PetscInt *indices;
129   MPI_Comm       comm;
130   PetscBool      isblock;
131 
132   PetscFunctionBegin;
133   PetscValidHeaderSpecific(is,IS_CLASSID,1);
134   PetscValidPointer(mapping,2);
135 
136   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
137   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
138   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
139   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
140   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
141   if (!isblock || bs == 1) {
142     ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
143     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
144   } else {
145     ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
146   }
147   PetscFunctionReturn(0);
148 }
149 
150 #undef __FUNCT__
151 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
152 /*@C
153     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
154     ordering and a global parallel ordering.
155 
156     Collective
157 
158     Input Parameter:
159 +   sf - star forest mapping contiguous local indices to (rank, offset)
160 -   start - first global index on this process
161 
162     Output Parameter:
163 .   mapping - new mapping data structure
164 
165     Level: advanced
166 
167     Concepts: mapping^local to global
168 
169 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
170 @*/
171 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
172 {
173   PetscErrorCode ierr;
174   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
175   const PetscInt *ilocal;
176   MPI_Comm       comm;
177 
178   PetscFunctionBegin;
179   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
180   PetscValidPointer(mapping,3);
181 
182   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
183   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
184   if (ilocal) {
185     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
186   }
187   else maxlocal = nleaves;
188   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
189   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
190   for (i=0; i<nroots; i++) globals[i] = start + i;
191   for (i=0; i<maxlocal; i++) ltog[i] = -1;
192   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
193   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
194   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
195   ierr = PetscFree(globals);CHKERRQ(ierr);
196   PetscFunctionReturn(0);
197 }
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
201 /*@
202     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
203     ordering and a global parallel ordering.
204 
205     Not Collective, but communicator may have more than one process
206 
207     Input Parameters:
208 +   comm - MPI communicator
209 .   n - the number of local elements
210 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
211 -   mode - see PetscCopyMode
212 
213     Output Parameter:
214 .   mapping - new mapping data structure
215 
216     Level: advanced
217 
218     Concepts: mapping^local to global
219 
220 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
221 @*/
222 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
223 {
224   PetscErrorCode ierr;
225   PetscInt       *in;
226 
227   PetscFunctionBegin;
228   if (n) PetscValidIntPointer(indices,3);
229   PetscValidPointer(mapping,4);
230 
231   *mapping = NULL;
232   ierr = ISInitializePackage();CHKERRQ(ierr);
233 
234   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
235                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
236   (*mapping)->n = n;
237   /*
238     Do not create the global to local mapping. This is only created if
239     ISGlobalToLocalMapping() is called
240   */
241   (*mapping)->globals = 0;
242   if (mode == PETSC_COPY_VALUES) {
243     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
244     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
245     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
246     (*mapping)->indices = in;
247   } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices;
248   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
249   PetscFunctionReturn(0);
250 }
251 
252 #undef __FUNCT__
253 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
254 /*@
255     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
256        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
257        and VecSetLocalToGlobalMappingBlock().
258 
259     Not Collective, but communicator may have more than one process
260 
261     Input Parameters:
262 +    inmap - original point-wise mapping
263 -    bs - block size
264 
265     Output Parameter:
266 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
267 
268     Level: advanced
269 
270     Concepts: mapping^local to global
271 
272 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
273 @*/
274 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
275 {
276   PetscErrorCode ierr;
277   PetscInt       *ii,i,n;
278 
279   PetscFunctionBegin;
280   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
281   PetscValidPointer(outmap,3);
282   if (bs > 1) {
283     n = inmap->n/bs;
284     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
285     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
286     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
287     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
288   } else {
289     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
290     *outmap = inmap;
291   }
292   PetscFunctionReturn(0);
293 }
294 
295 #undef __FUNCT__
296 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
297 /*@
298     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
299        ISLocalToGlobalMapping
300 
301     Not Collective, but communicator may have more than one process
302 
303     Input Parameter:
304 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
305 - bs - block size
306 
307     Output Parameter:
308 .   outmap - pointwise mapping
309 
310     Level: advanced
311 
312     Concepts: mapping^local to global
313 
314 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
315 @*/
316 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
317 {
318   PetscErrorCode ierr;
319   PetscInt       *ii,i,n;
320 
321   PetscFunctionBegin;
322   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
323   PetscValidPointer(outmap,2);
324   if (bs > 1) {
325     n    = inmap->n*bs;
326     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
327     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
328     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
329   } else {
330     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
331     *outmap = inmap;
332   }
333   PetscFunctionReturn(0);
334 }
335 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure; set to NULL on return

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* drop one reference; only the final reference actually frees the data */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  /* globals may be NULL if the global-to-local table was never built; PetscFree(NULL) is a no-op */
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
365 
366 #undef __FUNCT__
367 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
368 /*@
369     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
370     a new index set using the global numbering defined in an ISLocalToGlobalMapping
371     context.
372 
373     Not collective
374 
375     Input Parameters:
376 +   mapping - mapping between local and global numbering
377 -   is - index set in local numbering
378 
379     Output Parameters:
380 .   newis - index set in global numbering
381 
382     Level: advanced
383 
384     Concepts: mapping^local to global
385 
386 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
387           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
388 @*/
389 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
390 {
391   PetscErrorCode ierr;
392   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
393   const PetscInt *idxin;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
397   PetscValidHeaderSpecific(is,IS_CLASSID,2);
398   PetscValidPointer(newis,3);
399 
400   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
401   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
402   idxmap = mapping->indices;
403 
404   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
405   for (i=0; i<n; i++) {
406     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
407     idxout[i] = idxmap[idxin[i]];
408   }
409   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
410   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
411   PetscFunctionReturn(0);
412 }
413 
414 #undef __FUNCT__
415 #define __FUNCT__ "ISLocalToGlobalMappingApply"
416 /*@
417    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
418    and converts them to the global numbering.
419 
420    Not collective
421 
422    Input Parameters:
423 +  mapping - the local to global mapping context
424 .  N - number of integers
425 -  in - input indices in local numbering
426 
427    Output Parameter:
428 .  out - indices in global numbering
429 
430    Notes:
431    The in and out array parameters may be identical.
432 
433    Level: advanced
434 
435 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
436           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
437           AOPetscToApplication(), ISGlobalToLocalMappingApply()
438 
439     Concepts: mapping^local to global
440 @*/
441 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
442 {
443   PetscInt       i,Nmax = mapping->n;
444   const PetscInt *idx = mapping->indices;
445 
446   PetscFunctionBegin;
447   for (i=0; i<N; i++) {
448     if (in[i] < 0) {
449       out[i] = in[i];
450       continue;
451     }
452     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
453     out[i] = idx[in[i]];
454   }
455   PetscFunctionReturn(0);
456 }
457 
458 /* -----------------------------------------------------------------------------------------*/
459 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    globalstart/globalend (the range of nonnegative global indices present)
    and globals, a dense lookup table mapping global index g to the local
    index i with indices[i] == g (or -1 if g has no local counterpart).

    Memory use is O(globalend - globalstart), not scalable for sparse ranges.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the min/max nonnegative global indices; negatives are "unset" and skipped */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative indices at all: use the empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* table of size end-start+1; the extra +1 avoids a zero-length allocation
     when the range is empty */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  /* later entries overwrite earlier ones when a global index appears more than once */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
494 
495 #undef __FUNCT__
496 #define __FUNCT__ "ISGlobalToLocalMappingApply"
497 /*@
498     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
499     specified with a global numbering.
500 
501     Not collective
502 
503     Input Parameters:
504 +   mapping - mapping between local and global numbering
505 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
506            IS_GTOLM_DROP - drops the indices with no local value from the output list
507 .   n - number of global indices to map
508 -   idx - global indices to map
509 
510     Output Parameters:
511 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
512 -   idxout - local index of each global index, one must pass in an array long enough
513              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
514              idxout == NULL to determine the required length (returned in nout)
515              and then allocate the required space and call ISGlobalToLocalMappingApply()
516              a second time to set the values.
517 
518     Notes:
519     Either nout or idxout may be NULL. idx and idxout may be identical.
520 
521     This is not scalable in memory usage. Each processor requires O(Nglobal) size
522     array to compute these.
523 
524     Level: advanced
525 
526     Developer Note: The manual page states that idx and idxout may be identical but the calling
527        sequence declares idx as const so it cannot be the same as idxout.
528 
529     Concepts: mapping^global to local
530 
531 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
532           ISLocalToGlobalMappingDestroy()
533 @*/
534 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
535                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
536 {
537   PetscInt       i,*globals,nf = 0,tmp,start,end;
538   PetscErrorCode ierr;
539 
540   PetscFunctionBegin;
541   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
542   if (!mapping->globals) {
543     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
544   }
545   globals = mapping->globals;
546   start   = mapping->globalstart;
547   end     = mapping->globalend;
548 
549   if (type == IS_GTOLM_MASK) {
550     if (idxout) {
551       for (i=0; i<n; i++) {
552         if (idx[i] < 0) idxout[i] = idx[i];
553         else if (idx[i] < start) idxout[i] = -1;
554         else if (idx[i] > end)   idxout[i] = -1;
555         else                     idxout[i] = globals[idx[i] - start];
556       }
557     }
558     if (nout) *nout = n;
559   } else {
560     if (idxout) {
561       for (i=0; i<n; i++) {
562         if (idx[i] < 0) continue;
563         if (idx[i] < start) continue;
564         if (idx[i] > end) continue;
565         tmp = globals[idx[i] - start];
566         if (tmp < 0) continue;
567         idxout[nf++] = tmp;
568       }
569     } else {
570       for (i=0; i<n; i++) {
571         if (idx[i] < 0) continue;
572         if (idx[i] < start) continue;
573         if (idx[i] > end) continue;
574         tmp = globals[idx[i] - start];
575         if (tmp < 0) continue;
576         nf++;
577       }
578     }
579     if (nout) *nout = nf;
580   }
581   PetscFunctionReturn(0);
582 }
583 
584 #undef __FUNCT__
585 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
586 /*@C
587     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
588      each index shared by more than one processor
589 
590     Collective on ISLocalToGlobalMapping
591 
592     Input Parameters:
593 .   mapping - the mapping from local to global indexing
594 
595     Output Parameter:
596 +   nproc - number of processors that are connected to this one
597 .   proc - neighboring processors
598 .   numproc - number of indices for each subdomain (processor)
599 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
600 
601     Level: advanced
602 
603     Concepts: mapping^local to global
604 
605     Fortran Usage:
606 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
607 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
608           PetscInt indices[nproc][numprocmax],ierr)
609         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
610         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
611 
612 
613 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
614           ISLocalToGlobalMappingRestoreInfo()
615 @*/
616 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
617 {
618   PetscErrorCode ierr;
619   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
620   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
621   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
622   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
623   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
624   PetscInt       first_procs,first_numprocs,*first_indices;
625   MPI_Request    *recv_waits,*send_waits;
626   MPI_Status     recv_status,*send_status,*recv_statuses;
627   MPI_Comm       comm;
628   PetscBool      debug = PETSC_FALSE;
629 
630   PetscFunctionBegin;
631   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
632   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
633   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
634   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
635   if (size == 1) {
636     *nproc         = 0;
637     *procs         = NULL;
638     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
639     (*numprocs)[0] = 0;
640     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
641     (*indices)[0]  = NULL;
642     PetscFunctionReturn(0);
643   }
644 
645   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
646 
647   /*
648     Notes on ISLocalToGlobalMappingGetInfo
649 
650     globally owned node - the nodes that have been assigned to this processor in global
651            numbering, just for this routine.
652 
653     nontrivial globally owned node - node assigned to this processor that is on a subdomain
654            boundary (i.e. is has more than one local owner)
655 
656     locally owned node - node that exists on this processors subdomain
657 
658     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
659            local subdomain
660   */
661   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
662   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
663   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
664 
665   for (i=0; i<n; i++) {
666     if (lindices[i] > max) max = lindices[i];
667   }
668   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
669   Ng++;
670   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
671   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
672   scale  = Ng/size + 1;
673   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
674   rstart = scale*rank;
675 
676   /* determine ownership ranges of global indices */
677   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
678   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
679 
680   /* determine owners of each local node  */
681   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
682   for (i=0; i<n; i++) {
683     proc             = lindices[i]/scale; /* processor that globally owns this index */
684     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
685     owner[i]         = proc;
686     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
687   }
688   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
689   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
690 
691   /* inform other processors of number of messages and max length*/
692   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
693   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
694 
695   /* post receives for owned rows */
696   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
697   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
698   for (i=0; i<nrecvs; i++) {
699     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
700   }
701 
702   /* pack messages containing lists of local nodes to owners */
703   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
704   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
705   starts[0] = 0;
706   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
707   for (i=0; i<n; i++) {
708     sends[starts[owner[i]]++] = lindices[i];
709     sends[starts[owner[i]]++] = i;
710   }
711   ierr = PetscFree(owner);CHKERRQ(ierr);
712   starts[0] = 0;
713   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
714 
715   /* send the messages */
716   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
717   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
718   cnt = 0;
719   for (i=0; i<size; i++) {
720     if (nprocs[2*i]) {
721       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
722       dest[cnt] = i;
723       cnt++;
724     }
725   }
726   ierr = PetscFree(starts);CHKERRQ(ierr);
727 
728   /* wait on receives */
729   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
730   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
731   cnt  = nrecvs;
732   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
733   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
734   while (cnt) {
735     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
736     /* unpack receives into our local space */
737     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
738     source[imdex] = recv_status.MPI_SOURCE;
739     len[imdex]    = len[imdex]/2;
740     /* count how many local owners for each of my global owned indices */
741     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
742     cnt--;
743   }
744   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
745 
746   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
747   nowned  = 0;
748   nownedm = 0;
749   for (i=0; i<ng; i++) {
750     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
751   }
752 
753   /* create single array to contain rank of all local owners of each globally owned index */
754   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
755   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
756   starts[0] = 0;
757   for (i=1; i<ng; i++) {
758     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
759     else starts[i] = starts[i-1];
760   }
761 
762   /* for each nontrival globally owned node list all arriving processors */
763   for (i=0; i<nrecvs; i++) {
764     for (j=0; j<len[i]; j++) {
765       node = recvs[2*i*nmax+2*j]-rstart;
766       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
767     }
768   }
769 
770   if (debug) { /* -----------------------------------  */
771     starts[0] = 0;
772     for (i=1; i<ng; i++) {
773       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
774       else starts[i] = starts[i-1];
775     }
776     for (i=0; i<ng; i++) {
777       if (nownedsenders[i] > 1) {
778         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
779         for (j=0; j<nownedsenders[i]; j++) {
780           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
781         }
782         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
783       }
784     }
785     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
786   } /* -----------------------------------  */
787 
788   /* wait on original sends */
789   if (nsends) {
790     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
791     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
792     ierr = PetscFree(send_status);CHKERRQ(ierr);
793   }
794   ierr = PetscFree(send_waits);CHKERRQ(ierr);
795   ierr = PetscFree(sends);CHKERRQ(ierr);
796   ierr = PetscFree(nprocs);CHKERRQ(ierr);
797 
798   /* pack messages to send back to local owners */
799   starts[0] = 0;
800   for (i=1; i<ng; i++) {
801     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
802     else starts[i] = starts[i-1];
803   }
804   nsends2 = nrecvs;
805   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
806   for (i=0; i<nrecvs; i++) {
807     nprocs[i] = 1;
808     for (j=0; j<len[i]; j++) {
809       node = recvs[2*i*nmax+2*j]-rstart;
810       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
811     }
812   }
813   nt = 0;
814   for (i=0; i<nsends2; i++) nt += nprocs[i];
815 
816   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
817   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
818 
819   starts2[0] = 0;
820   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
821   /*
822      Each message is 1 + nprocs[i] long, and consists of
823        (0) the number of nodes being sent back
824        (1) the local node number,
825        (2) the number of processors sharing it,
826        (3) the processors sharing it
827   */
828   for (i=0; i<nsends2; i++) {
829     cnt = 1;
830     sends2[starts2[i]] = 0;
831     for (j=0; j<len[i]; j++) {
832       node = recvs[2*i*nmax+2*j]-rstart;
833       if (nownedsenders[node] > 1) {
834         sends2[starts2[i]]++;
835         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
836         sends2[starts2[i]+cnt++] = nownedsenders[node];
837         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
838         cnt += nownedsenders[node];
839       }
840     }
841   }
842 
843   /* receive the message lengths */
844   nrecvs2 = nsends;
845   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
846   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
847   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
848   for (i=0; i<nrecvs2; i++) {
849     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
850   }
851 
852   /* send the message lengths */
853   for (i=0; i<nsends2; i++) {
854     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
855   }
856 
857   /* wait on receives of lens */
858   if (nrecvs2) {
859     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
860     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
861     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
862   }
863   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
864 
865   starts3[0] = 0;
866   nt         = 0;
867   for (i=0; i<nrecvs2-1; i++) {
868     starts3[i+1] = starts3[i] + lens2[i];
869     nt          += lens2[i];
870   }
871   if (nrecvs2) nt += lens2[nrecvs2-1];
872 
873   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
874   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
875   for (i=0; i<nrecvs2; i++) {
876     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
877   }
878 
879   /* send the messages */
880   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
881   for (i=0; i<nsends2; i++) {
882     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
883   }
884 
885   /* wait on receives */
886   if (nrecvs2) {
887     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
888     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
889     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
890   }
891   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
892   ierr = PetscFree(nprocs);CHKERRQ(ierr);
893 
894   if (debug) { /* -----------------------------------  */
895     cnt = 0;
896     for (i=0; i<nrecvs2; i++) {
897       nt = recvs2[cnt++];
898       for (j=0; j<nt; j++) {
899         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
900         for (k=0; k<recvs2[cnt+1]; k++) {
901           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
902         }
903         cnt += 2 + recvs2[cnt+1];
904         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
905       }
906     }
907     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
908   } /* -----------------------------------  */
909 
910   /* count number subdomains for each local node */
911   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
912   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
913   cnt  = 0;
914   for (i=0; i<nrecvs2; i++) {
915     nt = recvs2[cnt++];
916     for (j=0; j<nt; j++) {
917       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
918       cnt += 2 + recvs2[cnt+1];
919     }
920   }
921   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
922   *nproc    = nt;
923   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
924   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
925   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
926   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
927   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
928   cnt       = 0;
929   for (i=0; i<size; i++) {
930     if (nprocs[i] > 0) {
931       bprocs[i]        = cnt;
932       (*procs)[cnt]    = i;
933       (*numprocs)[cnt] = nprocs[i];
934       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
935       cnt++;
936     }
937   }
938 
939   /* make the list of subdomains for each nontrivial local node */
940   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
941   cnt  = 0;
942   for (i=0; i<nrecvs2; i++) {
943     nt = recvs2[cnt++];
944     for (j=0; j<nt; j++) {
945       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
946       cnt += 2 + recvs2[cnt+1];
947     }
948   }
949   ierr = PetscFree(bprocs);CHKERRQ(ierr);
950   ierr = PetscFree(recvs2);CHKERRQ(ierr);
951 
952   /* sort the node indexing by their global numbers */
953   nt = *nproc;
954   for (i=0; i<nt; i++) {
955     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
956     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
957     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
958     ierr = PetscFree(tmp);CHKERRQ(ierr);
959   }
960 
961   if (debug) { /* -----------------------------------  */
962     nt = *nproc;
963     for (i=0; i<nt; i++) {
964       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
965       for (j=0; j<(*numprocs)[i]; j++) {
966         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
967       }
968       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
969     }
970     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
971   } /* -----------------------------------  */
972 
973   /* wait on sends */
974   if (nsends2) {
975     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
976     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
977     ierr = PetscFree(send_status);CHKERRQ(ierr);
978   }
979 
980   ierr = PetscFree(starts3);CHKERRQ(ierr);
981   ierr = PetscFree(dest);CHKERRQ(ierr);
982   ierr = PetscFree(send_waits);CHKERRQ(ierr);
983 
984   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
985   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
986   ierr = PetscFree(starts);CHKERRQ(ierr);
987   ierr = PetscFree(starts2);CHKERRQ(ierr);
988   ierr = PetscFree(lens2);CHKERRQ(ierr);
989 
990   ierr = PetscFree(source);CHKERRQ(ierr);
991   ierr = PetscFree(len);CHKERRQ(ierr);
992   ierr = PetscFree(recvs);CHKERRQ(ierr);
993   ierr = PetscFree(nprocs);CHKERRQ(ierr);
994   ierr = PetscFree(sends2);CHKERRQ(ierr);
995 
996   /* put the information about myself as the first entry in the list */
997   first_procs    = (*procs)[0];
998   first_numprocs = (*numprocs)[0];
999   first_indices  = (*indices)[0];
1000   for (i=0; i<*nproc; i++) {
1001     if ((*procs)[i] == rank) {
1002       (*procs)[0]    = (*procs)[i];
1003       (*numprocs)[0] = (*numprocs)[i];
1004       (*indices)[0]  = (*indices)[i];
1005       (*procs)[i]    = first_procs;
1006       (*numprocs)[i] = first_numprocs;
1007       (*indices)[i]  = first_indices;
1008       break;
1009     }
1010   }
1011   PetscFunctionReturn(0);
1012 }
1013 
1014 #undef __FUNCT__
1015 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1016 /*@C
1017     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1018 
1019     Collective on ISLocalToGlobalMapping
1020 
1021     Input Parameters:
1022 .   mapping - the mapping from local to global indexing
1023 
1024     Output Parameter:
1025 +   nproc - number of processors that are connected to this one
1026 .   proc - neighboring processors
1027 .   numproc - number of indices for each processor
1028 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1029 
1030     Level: advanced
1031 
1032 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1033           ISLocalToGlobalMappingGetInfo()
1034 @*/
1035 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1036 {
1037   PetscErrorCode ierr;
1038   PetscInt       i;
1039 
1040   PetscFunctionBegin;
1041   ierr = PetscFree(*procs);CHKERRQ(ierr);
1042   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1043   if (*indices) {
1044     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1045     for (i=1; i<*nproc; i++) {
1046       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1047     }
1048     ierr = PetscFree(*indices);CHKERRQ(ierr);
1049   }
1050   PetscFunctionReturn(0);
1051 }
1052 
1053 #undef __FUNCT__
1054 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1055 /*@C
1056    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1057 
1058    Not Collective
1059 
1060    Input Arguments:
1061 . ltog - local to global mapping
1062 
1063    Output Arguments:
1064 . array - array of indices
1065 
1066    Level: advanced
1067 
1068 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1069 @*/
1070 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1071 {
1072   PetscFunctionBegin;
1073   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1074   PetscValidPointer(array,2);
1075   *array = ltog->indices;
1076   PetscFunctionReturn(0);
1077 }
1078 
1079 #undef __FUNCT__
1080 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1081 /*@C
1082    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1083 
1084    Not Collective
1085 
1086    Input Arguments:
1087 + ltog - local to global mapping
1088 - array - array of indices
1089 
1090    Level: advanced
1091 
1092 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1093 @*/
1094 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1095 {
1096   PetscFunctionBegin;
1097   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1098   PetscValidPointer(array,2);
1099   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1100   *array = NULL;
1101   PetscFunctionReturn(0);
1102 }
1103 
1104 #undef __FUNCT__
1105 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1106 /*@C
1107    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1108 
1109    Not Collective
1110 
1111    Input Arguments:
1112 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1113 . n - number of mappings to concatenate
1114 - ltogs - local to global mappings
1115 
1116    Output Arguments:
1117 . ltogcat - new mapping
1118 
1119    Level: advanced
1120 
1121 .seealso: ISLocalToGlobalMappingCreate()
1122 @*/
1123 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1124 {
1125   PetscInt       i,cnt,m,*idx;
1126   PetscErrorCode ierr;
1127 
1128   PetscFunctionBegin;
1129   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1130   if (n > 0) PetscValidPointer(ltogs,3);
1131   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1132   PetscValidPointer(ltogcat,4);
1133   for (cnt=0,i=0; i<n; i++) {
1134     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1135     cnt += m;
1136   }
1137   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1138   for (cnt=0,i=0; i<n; i++) {
1139     const PetscInt *subidx;
1140     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1141     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1142     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1143     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1144     cnt += m;
1145   }
1146   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1147   PetscFunctionReturn(0);
1148 }
1149 
1150 
1151