xref: /petsc/src/vec/is/utils/isltog.c (revision b00a91154f763f12aa55f3d53a3f2776f15f49e3)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "ISG2LMapApply"
10 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
11 {
12   PetscErrorCode ierr;
13   PetscInt       i,start,end;
14 
15   PetscFunctionBegin;
16   if (!mapping->globals) {
17     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
18   }
19   start = mapping->globalstart;
20   end = mapping->globalend;
21   for (i=0; i<n; i++) {
22     if (in[i] < 0)          out[i] = in[i];
23     else if (in[i] < start) out[i] = -1;
24     else if (in[i] > end)   out[i] = -1;
25     else                    out[i] = mapping->globals[in[i] - start];
26   }
27   PetscFunctionReturn(0);
28 }
29 
30 
31 #undef __FUNCT__
32 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
33 /*@C
34     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
35 
36     Not Collective
37 
38     Input Parameter:
39 .   ltog - local to global mapping
40 
41     Output Parameter:
42 .   n - the number of entries in the local mapping
43 
44     Level: advanced
45 
46     Concepts: mapping^local to global
47 
48 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
49 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* The local size is simply the number of entries stored in the mapping */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
58 
59 #undef __FUNCT__
60 #define __FUNCT__ "ISLocalToGlobalMappingView"
61 /*@C
62     ISLocalToGlobalMappingView - View a local to global mapping
63 
64     Not Collective
65 
66     Input Parameters:
67 +   ltog - local to global mapping
68 -   viewer - viewer
69 
70     Level: advanced
71 
72     Concepts: mapping^local to global
73 
74 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
75 @*/
76 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
77 {
78   PetscInt       i;
79   PetscMPIInt    rank;
80   PetscBool      iascii;
81   PetscErrorCode ierr;
82 
83   PetscFunctionBegin;
84   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
85   if (!viewer) {
86     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
87   }
88   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
89 
90   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
91   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
92   if (iascii) {
93     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
94     for (i=0; i<mapping->n; i++) {
95       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
96     }
97     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
98     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
99   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
100   PetscFunctionReturn(0);
101 }
102 
103 #undef __FUNCT__
104 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
105 /*@
106     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
107     ordering and a global parallel ordering.
108 
109     Not collective
110 
111     Input Parameter:
112 .   is - index set containing the global numbers for each local number
113 
114     Output Parameter:
115 .   mapping - new mapping data structure
116 
117     Level: advanced
118 
119     Concepts: mapping^local to global
120 
121 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
122 @*/
123 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
124 {
125   PetscErrorCode ierr;
126   PetscInt       n;
127   const PetscInt *indices;
128   MPI_Comm       comm;
129 
130   PetscFunctionBegin;
131   PetscValidHeaderSpecific(is,IS_CLASSID,1);
132   PetscValidPointer(mapping,2);
133 
134   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
135   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
136   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
137   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
138   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
139   PetscFunctionReturn(0);
140 }
141 
142 #undef __FUNCT__
143 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
144 /*@C
145     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
146     ordering and a global parallel ordering.
147 
148     Collective
149 
150     Input Parameters:
151 +   sf - star forest mapping contiguous local indices to (rank, offset)
152 -   start - first global index on this process
153 
154     Output Parameter:
155 .   mapping - new mapping data structure
156 
157     Level: advanced
158 
159     Concepts: mapping^local to global
160 
161 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
162 @*/
163 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
164 {
165   PetscErrorCode ierr;
166   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
167   const PetscInt *ilocal;
168   MPI_Comm       comm;
169 
170   PetscFunctionBegin;
171   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
172   PetscValidPointer(mapping,3);
173 
174   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
175   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
176   if (ilocal) {
177     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
178   }
179   else maxlocal = nleaves;
180   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
181   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
182   for (i=0; i<nroots; i++) globals[i] = start + i;
183   for (i=0; i<maxlocal; i++) ltog[i] = -1;
184   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
185   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
186   ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
187   ierr = PetscFree(globals);CHKERRQ(ierr);
188   PetscFunctionReturn(0);
189 }
190 
191 #undef __FUNCT__
192 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
193 /*@
194     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
195     ordering and a global parallel ordering.
196 
197     Not Collective, but communicator may have more than one process
198 
199     Input Parameters:
200 +   comm - MPI communicator
201 .   n - the number of local elements
202 .   indices - the global index for each local element, these do not need to be in increasing order (sorted)
203 -   mode - see PetscCopyMode
204 
205     Output Parameter:
206 .   mapping - new mapping data structure
207 
208     Level: advanced
209 
210     Concepts: mapping^local to global
211 
212 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
213 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may legitimately be NULL when n == 0, so only validate it for nonempty mappings */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* Take a private copy of the caller's index array */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices; /* mapping now owns (and will free) the caller's array */
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
243 
244 #undef __FUNCT__
245 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
246 /*@
247     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
248        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
249        and VecSetLocalToGlobalMappingBlock().
250 
251     Not Collective, but communicator may have more than one process
252 
253     Input Parameters:
254 +    inmap - original point-wise mapping
255 -    bs - block size
256 
257     Output Parameter:
258 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
259 
260     Level: advanced
261 
262     Concepts: mapping^local to global
263 
264 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
265 @*/
266 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
267 {
268   PetscErrorCode ierr;
269   PetscInt       *ii,i,n;
270 
271   PetscFunctionBegin;
272   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
273   PetscValidPointer(outmap,3);
274   if (bs > 1) {
275     n = inmap->n/bs;
276     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
277     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
278     for (i=0; i<n; i++) ii[i] = inmap->indices[bs*i]/bs;
279     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
280   } else {
281     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
282     *outmap = inmap;
283   }
284   PetscFunctionReturn(0);
285 }
286 
287 #undef __FUNCT__
288 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
289 /*@
290     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
291        ISLocalToGlobalMapping
292 
293     Not Collective, but communicator may have more than one process
294 
295     Input Parameters:
296 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
297 - bs - block size
298 
299     Output Parameter:
300 .   outmap - pointwise mapping
301 
302     Level: advanced
303 
304     Concepts: mapping^local to global
305 
306 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
307 @*/
308 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
309 {
310   PetscErrorCode ierr;
311   PetscInt       *ii,i,n;
312 
313   PetscFunctionBegin;
314   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
315   PetscValidPointer(outmap,2);
316   if (bs > 1) {
317     n    = inmap->n*bs;
318     ierr = PetscMalloc1(n,&ii);CHKERRQ(ierr);
319     for (i=0; i<n; i++) ii[i] = inmap->indices[i/bs]*bs + (i%bs);
320     ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)inmap),n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
321   } else {
322     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
323     *outmap = inmap;
324   }
325   PetscFunctionReturn(0);
326 }
327 
328 #undef __FUNCT__
329 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
330 /*@
331    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
332    ordering and a global parallel ordering.
333 
334    Not Collective
335 
336    Input Parameters:
337 .  mapping - mapping data structure
338 
339    Level: advanced
340 
341 .seealso: ISLocalToGlobalMappingCreate()
342 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Destroying an already-NULL mapping is a no-op, matching PETSc convention */
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Only the holder of the last reference actually frees the object;
     everyone else just drops their reference and NULLs their pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
357 
358 #undef __FUNCT__
359 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
360 /*@
361     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
362     a new index set using the global numbering defined in an ISLocalToGlobalMapping
363     context.
364 
365     Not collective
366 
367     Input Parameters:
368 +   mapping - mapping between local and global numbering
369 -   is - index set in local numbering
370 
371     Output Parameters:
372 .   newis - index set in global numbering
373 
374     Level: advanced
375 
376     Concepts: mapping^local to global
377 
378 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
379           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
380 @*/
381 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
382 {
383   PetscErrorCode ierr;
384   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
385   const PetscInt *idxin;
386 
387   PetscFunctionBegin;
388   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
389   PetscValidHeaderSpecific(is,IS_CLASSID,2);
390   PetscValidPointer(newis,3);
391 
392   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
393   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
394   idxmap = mapping->indices;
395 
396   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
397   for (i=0; i<n; i++) {
398     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
399     idxout[i] = idxmap[idxin[i]];
400   }
401   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
402   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
403   PetscFunctionReturn(0);
404 }
405 
406 #undef __FUNCT__
407 #define __FUNCT__ "ISLocalToGlobalMappingApply"
408 /*@
409    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
410    and converts them to the global numbering.
411 
412    Not collective
413 
414    Input Parameters:
415 +  mapping - the local to global mapping context
416 .  N - number of integers
417 -  in - input indices in local numbering
418 
419    Output Parameter:
420 .  out - indices in global numbering
421 
422    Notes:
423    The in and out array parameters may be identical.
424 
425    Level: advanced
426 
427 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
428           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
429           AOPetscToApplication(), ISGlobalToLocalMappingApply()
430 
431     Concepts: mapping^local to global
432 @*/
433 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
434 {
435   PetscInt       i,Nmax = mapping->n;
436   const PetscInt *idx = mapping->indices;
437 
438   PetscFunctionBegin;
439   for (i=0; i<N; i++) {
440     if (in[i] < 0) {
441       out[i] = in[i];
442       continue;
443     }
444     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
445     out[i] = idx[in[i]];
446   }
447   PetscFunctionReturn(0);
448 }
449 
450 /* -----------------------------------------------------------------------------------------*/
451 
452 #undef __FUNCT__
453 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
454 /*
455     Creates the global fields in the ISLocalToGlobalMapping structure
456 */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  /* Find the smallest and largest nonnegative global index referenced locally;
     negative entries are masked "holes" and are skipped */
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* Empty (or all-negative) mapping: record an empty range [0,-1] */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* Dense lookup table over [start,end]; slots with no local owner stay -1.
     Note the O(end-start) memory cost, which is not scalable when the local
     global indices are sparse over a wide range */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  /* If a global index appears more than once, the last local index wins */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
486 
487 #undef __FUNCT__
488 #define __FUNCT__ "ISGlobalToLocalMappingApply"
489 /*@
490     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
491     specified with a global numbering.
492 
493     Not collective
494 
495     Input Parameters:
496 +   mapping - mapping between local and global numbering
497 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
498            IS_GTOLM_DROP - drops the indices with no local value from the output list
499 .   n - number of global indices to map
500 -   idx - global indices to map
501 
502     Output Parameters:
503 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
504 -   idxout - local index of each global index, one must pass in an array long enough
505              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
506              idxout == NULL to determine the required length (returned in nout)
507              and then allocate the required space and call ISGlobalToLocalMappingApply()
508              a second time to set the values.
509 
510     Notes:
511     Either nout or idxout may be NULL. idx and idxout may be identical.
512 
513     This is not scalable in memory usage. Each processor requires O(Nglobal) size
514     array to compute these.
515 
516     Level: advanced
517 
518     Developer Note: The manual page states that idx and idxout may be identical but the calling
519        sequence declares idx as const so it cannot be the same as idxout.
520 
521     Concepts: mapping^global to local
522 
523 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
524           ISLocalToGlobalMappingDestroy()
525 @*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as the input; entries with no local
       value become -1, negative inputs pass through unchanged */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: entries with no local value are omitted, so the output may be
       shorter than the input */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue; /* inside the covered range but not locally present */
        idxout[nf++] = tmp;
      }
    } else {
      /* No output array supplied: just count the mappable entries so the
         caller can size idxout and call a second time */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
575 
576 #undef __FUNCT__
577 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
578 /*@C
579     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
580      each index shared by more than one processor
581 
582     Collective on ISLocalToGlobalMapping
583 
584     Input Parameters:
585 .   mapping - the mapping from local to global indexing
586 
587     Output Parameters:
588 +   nproc - number of processors that are connected to this one
589 .   proc - neighboring processors
590 .   numproc - number of indices for each subdomain (processor)
591 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
592 
593     Level: advanced
594 
595     Concepts: mapping^local to global
596 
597     Fortran Usage:
598 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
599 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
600           PetscInt indices[nproc][numprocmax],ierr)
601         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
602         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
603 
604 
605 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
606           ISLocalToGlobalMappingRestoreInfo()
607 @*/
608 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
609 {
610   PetscErrorCode ierr;
611   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
612   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
613   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
614   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
615   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
616   PetscInt       first_procs,first_numprocs,*first_indices;
617   MPI_Request    *recv_waits,*send_waits;
618   MPI_Status     recv_status,*send_status,*recv_statuses;
619   MPI_Comm       comm;
620   PetscBool      debug = PETSC_FALSE;
621 
622   PetscFunctionBegin;
623   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
624   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
625   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
626   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
627   if (size == 1) {
628     *nproc         = 0;
629     *procs         = NULL;
630     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
631     (*numprocs)[0] = 0;
632     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
633     (*indices)[0]  = NULL;
634     PetscFunctionReturn(0);
635   }
636 
637   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
638 
639   /*
640     Notes on ISLocalToGlobalMappingGetInfo
641 
642     globally owned node - the nodes that have been assigned to this processor in global
643            numbering, just for this routine.
644 
645     nontrivial globally owned node - node assigned to this processor that is on a subdomain
646            boundary (i.e. is has more than one local owner)
647 
648     locally owned node - node that exists on this processors subdomain
649 
650     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
651            local subdomain
652   */
653   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
654   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
655   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
656 
657   for (i=0; i<n; i++) {
658     if (lindices[i] > max) max = lindices[i];
659   }
660   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
661   Ng++;
662   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
663   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
664   scale  = Ng/size + 1;
665   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
666   rstart = scale*rank;
667 
668   /* determine ownership ranges of global indices */
669   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
670   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
671 
672   /* determine owners of each local node  */
673   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
674   for (i=0; i<n; i++) {
675     proc             = lindices[i]/scale; /* processor that globally owns this index */
676     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
677     owner[i]         = proc;
678     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
679   }
680   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
681   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
682 
683   /* inform other processors of number of messages and max length*/
684   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
685   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
686 
687   /* post receives for owned rows */
688   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
689   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
690   for (i=0; i<nrecvs; i++) {
691     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
692   }
693 
694   /* pack messages containing lists of local nodes to owners */
695   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
696   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
697   starts[0] = 0;
698   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
699   for (i=0; i<n; i++) {
700     sends[starts[owner[i]]++] = lindices[i];
701     sends[starts[owner[i]]++] = i;
702   }
703   ierr = PetscFree(owner);CHKERRQ(ierr);
704   starts[0] = 0;
705   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
706 
707   /* send the messages */
708   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
709   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
710   cnt = 0;
711   for (i=0; i<size; i++) {
712     if (nprocs[2*i]) {
713       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
714       dest[cnt] = i;
715       cnt++;
716     }
717   }
718   ierr = PetscFree(starts);CHKERRQ(ierr);
719 
720   /* wait on receives */
721   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
722   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
723   cnt  = nrecvs;
724   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
725   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
726   while (cnt) {
727     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
728     /* unpack receives into our local space */
729     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
730     source[imdex] = recv_status.MPI_SOURCE;
731     len[imdex]    = len[imdex]/2;
732     /* count how many local owners for each of my global owned indices */
733     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
734     cnt--;
735   }
736   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
737 
738   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
739   nowned  = 0;
740   nownedm = 0;
741   for (i=0; i<ng; i++) {
742     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
743   }
744 
745   /* create single array to contain rank of all local owners of each globally owned index */
746   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
747   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
748   starts[0] = 0;
749   for (i=1; i<ng; i++) {
750     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
751     else starts[i] = starts[i-1];
752   }
753 
754   /* for each nontrival globally owned node list all arriving processors */
755   for (i=0; i<nrecvs; i++) {
756     for (j=0; j<len[i]; j++) {
757       node = recvs[2*i*nmax+2*j]-rstart;
758       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
759     }
760   }
761 
762   if (debug) { /* -----------------------------------  */
763     starts[0] = 0;
764     for (i=1; i<ng; i++) {
765       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
766       else starts[i] = starts[i-1];
767     }
768     for (i=0; i<ng; i++) {
769       if (nownedsenders[i] > 1) {
770         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
771         for (j=0; j<nownedsenders[i]; j++) {
772           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
773         }
774         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
775       }
776     }
777     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
778   } /* -----------------------------------  */
779 
780   /* wait on original sends */
781   if (nsends) {
782     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
783     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
784     ierr = PetscFree(send_status);CHKERRQ(ierr);
785   }
786   ierr = PetscFree(send_waits);CHKERRQ(ierr);
787   ierr = PetscFree(sends);CHKERRQ(ierr);
788   ierr = PetscFree(nprocs);CHKERRQ(ierr);
789 
790   /* pack messages to send back to local owners */
791   starts[0] = 0;
792   for (i=1; i<ng; i++) {
793     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
794     else starts[i] = starts[i-1];
795   }
796   nsends2 = nrecvs;
797   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
798   for (i=0; i<nrecvs; i++) {
799     nprocs[i] = 1;
800     for (j=0; j<len[i]; j++) {
801       node = recvs[2*i*nmax+2*j]-rstart;
802       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
803     }
804   }
805   nt = 0;
806   for (i=0; i<nsends2; i++) nt += nprocs[i];
807 
808   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
809   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
810 
811   starts2[0] = 0;
812   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
813   /*
814      Each message is 1 + nprocs[i] long, and consists of
815        (0) the number of nodes being sent back
816        (1) the local node number,
817        (2) the number of processors sharing it,
818        (3) the processors sharing it
819   */
820   for (i=0; i<nsends2; i++) {
821     cnt = 1;
822     sends2[starts2[i]] = 0;
823     for (j=0; j<len[i]; j++) {
824       node = recvs[2*i*nmax+2*j]-rstart;
825       if (nownedsenders[node] > 1) {
826         sends2[starts2[i]]++;
827         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
828         sends2[starts2[i]+cnt++] = nownedsenders[node];
829         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
830         cnt += nownedsenders[node];
831       }
832     }
833   }
834 
835   /* receive the message lengths */
836   nrecvs2 = nsends;
837   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
838   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
839   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
840   for (i=0; i<nrecvs2; i++) {
841     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
842   }
843 
844   /* send the message lengths */
845   for (i=0; i<nsends2; i++) {
846     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
847   }
848 
849   /* wait on receives of lens */
850   if (nrecvs2) {
851     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
852     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
853     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
854   }
855   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
856 
857   starts3[0] = 0;
858   nt         = 0;
859   for (i=0; i<nrecvs2-1; i++) {
860     starts3[i+1] = starts3[i] + lens2[i];
861     nt          += lens2[i];
862   }
863   if (nrecvs2) nt += lens2[nrecvs2-1];
864 
865   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
866   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
867   for (i=0; i<nrecvs2; i++) {
868     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
869   }
870 
871   /* send the messages */
872   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
873   for (i=0; i<nsends2; i++) {
874     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
875   }
876 
877   /* wait on receives */
878   if (nrecvs2) {
879     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
880     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
881     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
882   }
883   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
884   ierr = PetscFree(nprocs);CHKERRQ(ierr);
885 
886   if (debug) { /* -----------------------------------  */
887     cnt = 0;
888     for (i=0; i<nrecvs2; i++) {
889       nt = recvs2[cnt++];
890       for (j=0; j<nt; j++) {
891         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
892         for (k=0; k<recvs2[cnt+1]; k++) {
893           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
894         }
895         cnt += 2 + recvs2[cnt+1];
896         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
897       }
898     }
899     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
900   } /* -----------------------------------  */
901 
902   /* count number subdomains for each local node */
903   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
904   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
905   cnt  = 0;
906   for (i=0; i<nrecvs2; i++) {
907     nt = recvs2[cnt++];
908     for (j=0; j<nt; j++) {
909       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
910       cnt += 2 + recvs2[cnt+1];
911     }
912   }
913   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
914   *nproc    = nt;
915   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
916   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
917   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
918   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
919   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
920   cnt       = 0;
921   for (i=0; i<size; i++) {
922     if (nprocs[i] > 0) {
923       bprocs[i]        = cnt;
924       (*procs)[cnt]    = i;
925       (*numprocs)[cnt] = nprocs[i];
926       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
927       cnt++;
928     }
929   }
930 
931   /* make the list of subdomains for each nontrivial local node */
932   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
933   cnt  = 0;
934   for (i=0; i<nrecvs2; i++) {
935     nt = recvs2[cnt++];
936     for (j=0; j<nt; j++) {
937       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
938       cnt += 2 + recvs2[cnt+1];
939     }
940   }
941   ierr = PetscFree(bprocs);CHKERRQ(ierr);
942   ierr = PetscFree(recvs2);CHKERRQ(ierr);
943 
944   /* sort the node indexing by their global numbers */
945   nt = *nproc;
946   for (i=0; i<nt; i++) {
947     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
948     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
949     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
950     ierr = PetscFree(tmp);CHKERRQ(ierr);
951   }
952 
953   if (debug) { /* -----------------------------------  */
954     nt = *nproc;
955     for (i=0; i<nt; i++) {
956       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
957       for (j=0; j<(*numprocs)[i]; j++) {
958         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
959       }
960       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
961     }
962     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
963   } /* -----------------------------------  */
964 
965   /* wait on sends */
966   if (nsends2) {
967     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
968     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
969     ierr = PetscFree(send_status);CHKERRQ(ierr);
970   }
971 
972   ierr = PetscFree(starts3);CHKERRQ(ierr);
973   ierr = PetscFree(dest);CHKERRQ(ierr);
974   ierr = PetscFree(send_waits);CHKERRQ(ierr);
975 
976   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
977   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
978   ierr = PetscFree(starts);CHKERRQ(ierr);
979   ierr = PetscFree(starts2);CHKERRQ(ierr);
980   ierr = PetscFree(lens2);CHKERRQ(ierr);
981 
982   ierr = PetscFree(source);CHKERRQ(ierr);
983   ierr = PetscFree(len);CHKERRQ(ierr);
984   ierr = PetscFree(recvs);CHKERRQ(ierr);
985   ierr = PetscFree(nprocs);CHKERRQ(ierr);
986   ierr = PetscFree(sends2);CHKERRQ(ierr);
987 
988   /* put the information about myself as the first entry in the list */
989   first_procs    = (*procs)[0];
990   first_numprocs = (*numprocs)[0];
991   first_indices  = (*indices)[0];
992   for (i=0; i<*nproc; i++) {
993     if ((*procs)[i] == rank) {
994       (*procs)[0]    = (*procs)[i];
995       (*numprocs)[0] = (*numprocs)[i];
996       (*indices)[0]  = (*indices)[i];
997       (*procs)[i]    = first_procs;
998       (*numprocs)[i] = first_numprocs;
999       (*indices)[i]  = first_indices;
1000       break;
1001     }
1002   }
1003   PetscFunctionReturn(0);
1004 }
1005 
1006 #undef __FUNCT__
1007 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1008 /*@C
1009     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1010 
1011     Collective on ISLocalToGlobalMapping
1012 
    Input Parameter:
.   mapping - the mapping from local to global indexing
1015 
    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1021 
1022     Level: advanced
1023 
1024 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1025           ISLocalToGlobalMappingGetInfo()
1026 @*/
1027 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1028 {
1029   PetscErrorCode ierr;
1030   PetscInt       i;
1031 
1032   PetscFunctionBegin;
1033   ierr = PetscFree(*procs);CHKERRQ(ierr);
1034   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1035   if (*indices) {
1036     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1037     for (i=1; i<*nproc; i++) {
1038       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1039     }
1040     ierr = PetscFree(*indices);CHKERRQ(ierr);
1041   }
1042   PetscFunctionReturn(0);
1043 }
1044 
1045 #undef __FUNCT__
1046 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1047 /*@C
1048    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1049 
1050    Not Collective
1051 
1052    Input Arguments:
1053 . ltog - local to global mapping
1054 
1055    Output Arguments:
1056 . array - array of indices
1057 
1058    Level: advanced
1059 
1060 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1061 @*/
1062 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1063 {
1064   PetscFunctionBegin;
1065   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1066   PetscValidPointer(array,2);
1067   *array = ltog->indices;
1068   PetscFunctionReturn(0);
1069 }
1070 
1071 #undef __FUNCT__
1072 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1073 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1075 
1076    Not Collective
1077 
1078    Input Arguments:
1079 + ltog - local to global mapping
1080 - array - array of indices
1081 
1082    Level: advanced
1083 
1084 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1085 @*/
1086 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1087 {
1088   PetscFunctionBegin;
1089   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1090   PetscValidPointer(array,2);
1091   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1092   *array = NULL;
1093   PetscFunctionReturn(0);
1094 }
1095 
1096 #undef __FUNCT__
1097 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1098 /*@C
1099    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1100 
1101    Not Collective
1102 
1103    Input Arguments:
1104 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1105 . n - number of mappings to concatenate
1106 - ltogs - local to global mappings
1107 
1108    Output Arguments:
1109 . ltogcat - new mapping
1110 
1111    Level: advanced
1112 
1113 .seealso: ISLocalToGlobalMappingCreate()
1114 @*/
1115 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1116 {
1117   PetscInt       i,cnt,m,*idx;
1118   PetscErrorCode ierr;
1119 
1120   PetscFunctionBegin;
1121   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1122   if (n > 0) PetscValidPointer(ltogs,3);
1123   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1124   PetscValidPointer(ltogcat,4);
1125   for (cnt=0,i=0; i<n; i++) {
1126     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1127     cnt += m;
1128   }
1129   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1130   for (cnt=0,i=0; i<n; i++) {
1131     const PetscInt *subidx;
1132     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1133     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1134     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1135     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1136     cnt += m;
1137   }
1138   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1139   PetscFunctionReturn(0);
1140 }
1141