xref: /petsc/src/vec/is/utils/isltog.c (revision 45b6f7e9ecd564e8bb535880b270d4a8084ff211)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@C
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* return the number of entries in the local-to-global index table */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to the communicator's stdout viewer when none is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  /* only ASCII viewers are supported; anything else errors below */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so each rank's "[rank] local global" lines come out in rank order */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
142   /*  if (!isblock) { */
143     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
144     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
145     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
146     /*  } else {
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150      }*/
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* local space is 0..maxlocal-1: one past the largest leaf index, or simply
     nleaves when ilocal is NULL (contiguous leaf numbering) */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  /* roots own the contiguous global range [start, start+nroots) on this process */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* local slots not reached by the broadcast keep -1 (no global value) */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ltog ownership transfers to the mapping (PETSC_OWN_POINTER); only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   *bs = mapping->bs;
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
232 /*@
233     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
234     ordering and a global parallel ordering.
235 
236     Not Collective, but communicator may have more than one process
237 
238     Input Parameters:
239 +   comm - MPI communicator
240 .   bs - the block size
241 .   n - the number of local elements
242 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled by the blocksize bs
243 -   mode - see PetscCopyMode
244 
245     Output Parameter:
246 .   mapping - new mapping data structure
247 
248     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
249     Level: advanced
250 
251     Concepts: mapping^local to global
252 
253 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
254 @*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no local entries */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n  = n;
  (*mapping)->bs = bs;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) (*mapping)->indices = (PetscInt*)indices; /* mapping takes ownership; freed in Destroy */
  else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
285 
286 #undef __FUNCT__
287 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
288 /*@
289    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
290    ordering and a global parallel ordering.
291 
   Not Collective
293 
294    Input Parameters:
295 .  mapping - mapping data structure
296 
297    Level: advanced
298 
299 .seealso: ISLocalToGlobalMappingCreate()
300 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* destroying a NULL mapping is a no-op */
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* if other references remain, just drop this one and zero the caller's pointer */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  /* last reference: release the index table and the lazily built global->local table */
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
318 /*@
319     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
320     a new index set using the global numbering defined in an ISLocalToGlobalMapping
321     context.
322 
323     Not collective
324 
325     Input Parameters:
326 +   mapping - mapping between local and global numbering
327 -   is - index set in local numbering
328 
329     Output Parameters:
330 .   newis - index set in global numbering
331 
332     Level: advanced
333 
334     Concepts: mapping^local to global
335 
336 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
337           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
338 @*/
339 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
340 {
341   PetscErrorCode ierr;
342   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
343   const PetscInt *idxin;
344 
345   PetscFunctionBegin;
346   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
347   PetscValidHeaderSpecific(is,IS_CLASSID,2);
348   PetscValidPointer(newis,3);
349 
350   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
351   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
352   idxmap = mapping->indices;
353 
354   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
355   for (i=0; i<n; i++) {
356     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",idxin[i],Nmax-1,i);
357     idxout[i] = idxmap[idxin[i]];
358   }
359   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
360   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
361   PetscFunctionReturn(0);
362 }
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "ISLocalToGlobalMappingApply"
366 /*@
367    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
368    and converts them to the global numbering.
369 
370    Not collective
371 
372    Input Parameters:
373 +  mapping - the local to global mapping context
374 .  N - number of integers
375 -  in - input indices in local numbering
376 
377    Output Parameter:
378 .  out - indices in global numbering
379 
380    Notes:
381    The in and out array parameters may be identical.
382 
383    Level: advanced
384 
385 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
386           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
387           AOPetscToApplication(), ISGlobalToLocalMappingApply()
388 
389     Concepts: mapping^local to global
390 @*/
391 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
392 {
393   PetscInt       i,Nmax = mapping->n,bs = mapping->bs;
394   const PetscInt *idx = mapping->indices;
395 
396   PetscFunctionBegin;
397   if (bs == 1) {
398     for (i=0; i<N; i++) {
399       if (in[i] < 0) {
400         out[i] = in[i];
401         continue;
402       }
403       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
404       out[i] = idx[in[i]];
405     }
406   } else {
407     for (i=0; i<N; i++) {
408       if (in[i] < 0) {
409         out[i] = in[i];
410         continue;
411       }
412       if (in[i] >= bs*Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax,i);
413       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
414     }
415   }
416   PetscFunctionReturn(0);
417 }
418 
419 #undef __FUNCT__
420 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
421 /*@
422    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local numbering  and converts them to the global numbering.
423 
424    Not collective
425 
426    Input Parameters:
427 +  mapping - the local to global mapping context
428 .  N - number of integers
429 -  in - input indices in local numbering
430 
431    Output Parameter:
432 .  out - indices in global numbering
433 
434    Notes:
435    The in and out array parameters may be identical.
436 
437    Level: advanced
438 
439 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
440           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
441           AOPetscToApplication(), ISGlobalToLocalMappingApply()
442 
443     Concepts: mapping^local to global
444 @*/
445 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
446 {
447   PetscInt       i,Nmax = mapping->n;
448   const PetscInt *idx = mapping->indices;
449 
450   PetscFunctionBegin;
451   for (i=0; i<N; i++) {
452     if (in[i] < 0) {
453       out[i] = in[i];
454       continue;
455     }
456     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i]*mapping->bs,Nmax,i);
457     out[i] = idx[in[i]];
458   }
459   PetscFunctionReturn(0);
460 }
461 
462 /* -----------------------------------------------------------------------------------------*/
463 
464 #undef __FUNCT__
465 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure

    Builds a dense lookup table mapping->globals of length globalend-globalstart+1
    such that globals[g - globalstart] is the local index of global index g, or -1
    when g has no local value.  Memory use is O(globalend-globalstart), so this is
    not scalable when the local indices span a wide global range.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the range [start,end] spanned by the nonnegative global indices */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;                    /* negative entries have no global value */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}        /* no nonnegative indices: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* NOTE(review): end-start+2 (one extra slot) presumably avoids a zero-length
     allocation for the empty range -- confirm */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1; /* -1 marks globals with no local value */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;                 /* later duplicates overwrite earlier ones */
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
498 
499 #undef __FUNCT__
500 #define __FUNCT__ "ISGlobalToLocalMappingApply"
501 /*@
502     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
503     specified with a global numbering.
504 
505     Not collective
506 
507     Input Parameters:
508 +   mapping - mapping between local and global numbering
509 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
510            IS_GTOLM_DROP - drops the indices with no local value from the output list
511 .   n - number of global indices to map
512 -   idx - global indices to map
513 
514     Output Parameters:
515 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
516 -   idxout - local index of each global index, one must pass in an array long enough
517              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
518              idxout == NULL to determine the required length (returned in nout)
519              and then allocate the required space and call ISGlobalToLocalMappingApply()
520              a second time to set the values.
521 
522     Notes:
523     Either nout or idxout may be NULL. idx and idxout may be identical.
524 
525     This is not scalable in memory usage. Each processor requires O(Nglobal) size
526     array to compute these.
527 
528     Level: advanced
529 
530     Developer Note: The manual page states that idx and idxout may be identical but the calling
531        sequence declares idx as const so it cannot be the same as idxout.
532 
533     Concepts: mapping^global to local
534 
535 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
536           ISLocalToGlobalMappingDestroy()
537 @*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as input; negative inputs pass through,
       out-of-range inputs become -1, in-range inputs get their table entry
       (which may itself be -1 when there is no local value) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: unmapped entries are omitted from the output; when idxout is NULL
       only count them so the caller can size the output array */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;             /* in range but no local value */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting pass: identical filtering, no stores */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
587 
588 #undef __FUNCT__
589 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
590 /*@C
591     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
592      each index shared by more than one processor
593 
594     Collective on ISLocalToGlobalMapping
595 
596     Input Parameters:
597 .   mapping - the mapping from local to global indexing
598 
599     Output Parameter:
600 +   nproc - number of processors that are connected to this one
601 .   proc - neighboring processors
602 .   numproc - number of indices for each subdomain (processor)
603 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
604 
605     Level: advanced
606 
607     Concepts: mapping^local to global
608 
609     Fortran Usage:
610 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
611 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
612           PetscInt indices[nproc][numprocmax],ierr)
613         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
614         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
615 
616 
617 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
618           ISLocalToGlobalMappingRestoreInfo()
619 @*/
620 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
621 {
622   PetscErrorCode ierr;
623   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
624   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
625   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
626   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
627   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
628   PetscInt       first_procs,first_numprocs,*first_indices;
629   MPI_Request    *recv_waits,*send_waits;
630   MPI_Status     recv_status,*send_status,*recv_statuses;
631   MPI_Comm       comm;
632   PetscBool      debug = PETSC_FALSE;
633 
634   PetscFunctionBegin;
635   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
636   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
637   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
638   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
639   if (size == 1) {
640     *nproc         = 0;
641     *procs         = NULL;
642     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
643     (*numprocs)[0] = 0;
644     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
645     (*indices)[0]  = NULL;
646     PetscFunctionReturn(0);
647   }
648 
649   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
650 
651   /*
652     Notes on ISLocalToGlobalMappingGetInfo
653 
654     globally owned node - the nodes that have been assigned to this processor in global
655            numbering, just for this routine.
656 
657     nontrivial globally owned node - node assigned to this processor that is on a subdomain
658            boundary (i.e. is has more than one local owner)
659 
660     locally owned node - node that exists on this processors subdomain
661 
662     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
663            local subdomain
664   */
665   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
666   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
667   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
668 
669   for (i=0; i<n; i++) {
670     if (lindices[i] > max) max = lindices[i];
671   }
672   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
673   Ng++;
674   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
675   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
676   scale  = Ng/size + 1;
677   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
678   rstart = scale*rank;
679 
680   /* determine ownership ranges of global indices */
681   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
682   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
683 
684   /* determine owners of each local node  */
685   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
686   for (i=0; i<n; i++) {
687     proc             = lindices[i]/scale; /* processor that globally owns this index */
688     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
689     owner[i]         = proc;
690     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
691   }
692   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
693   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
694 
695   /* inform other processors of number of messages and max length*/
696   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
697   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
698 
699   /* post receives for owned rows */
700   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
701   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
702   for (i=0; i<nrecvs; i++) {
703     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
704   }
705 
706   /* pack messages containing lists of local nodes to owners */
707   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
708   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
709   starts[0] = 0;
710   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
711   for (i=0; i<n; i++) {
712     sends[starts[owner[i]]++] = lindices[i];
713     sends[starts[owner[i]]++] = i;
714   }
715   ierr = PetscFree(owner);CHKERRQ(ierr);
716   starts[0] = 0;
717   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
718 
719   /* send the messages */
720   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
721   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
722   cnt = 0;
723   for (i=0; i<size; i++) {
724     if (nprocs[2*i]) {
725       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
726       dest[cnt] = i;
727       cnt++;
728     }
729   }
730   ierr = PetscFree(starts);CHKERRQ(ierr);
731 
732   /* wait on receives */
733   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
734   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
735   cnt  = nrecvs;
736   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
737   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
738   while (cnt) {
739     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
740     /* unpack receives into our local space */
741     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
742     source[imdex] = recv_status.MPI_SOURCE;
743     len[imdex]    = len[imdex]/2;
744     /* count how many local owners for each of my global owned indices */
745     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
746     cnt--;
747   }
748   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
749 
750   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
751   nowned  = 0;
752   nownedm = 0;
753   for (i=0; i<ng; i++) {
754     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
755   }
756 
757   /* create single array to contain rank of all local owners of each globally owned index */
758   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
759   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
760   starts[0] = 0;
761   for (i=1; i<ng; i++) {
762     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
763     else starts[i] = starts[i-1];
764   }
765 
  /* for each nontrivial globally owned node list all arriving processors */
767   for (i=0; i<nrecvs; i++) {
768     for (j=0; j<len[i]; j++) {
769       node = recvs[2*i*nmax+2*j]-rstart;
770       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
771     }
772   }
773 
774   if (debug) { /* -----------------------------------  */
775     starts[0] = 0;
776     for (i=1; i<ng; i++) {
777       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
778       else starts[i] = starts[i-1];
779     }
780     for (i=0; i<ng; i++) {
781       if (nownedsenders[i] > 1) {
782         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
783         for (j=0; j<nownedsenders[i]; j++) {
784           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
785         }
786         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
787       }
788     }
789     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
790   } /* -----------------------------------  */
791 
792   /* wait on original sends */
793   if (nsends) {
794     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
795     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
796     ierr = PetscFree(send_status);CHKERRQ(ierr);
797   }
798   ierr = PetscFree(send_waits);CHKERRQ(ierr);
799   ierr = PetscFree(sends);CHKERRQ(ierr);
800   ierr = PetscFree(nprocs);CHKERRQ(ierr);
801 
802   /* pack messages to send back to local owners */
803   starts[0] = 0;
804   for (i=1; i<ng; i++) {
805     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
806     else starts[i] = starts[i-1];
807   }
808   nsends2 = nrecvs;
809   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
810   for (i=0; i<nrecvs; i++) {
811     nprocs[i] = 1;
812     for (j=0; j<len[i]; j++) {
813       node = recvs[2*i*nmax+2*j]-rstart;
814       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
815     }
816   }
817   nt = 0;
818   for (i=0; i<nsends2; i++) nt += nprocs[i];
819 
820   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
821   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
822 
823   starts2[0] = 0;
824   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
825   /*
826      Each message is 1 + nprocs[i] long, and consists of
827        (0) the number of nodes being sent back
828        (1) the local node number,
829        (2) the number of processors sharing it,
830        (3) the processors sharing it
831   */
832   for (i=0; i<nsends2; i++) {
833     cnt = 1;
834     sends2[starts2[i]] = 0;
835     for (j=0; j<len[i]; j++) {
836       node = recvs[2*i*nmax+2*j]-rstart;
837       if (nownedsenders[node] > 1) {
838         sends2[starts2[i]]++;
839         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
840         sends2[starts2[i]+cnt++] = nownedsenders[node];
841         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
842         cnt += nownedsenders[node];
843       }
844     }
845   }
846 
847   /* receive the message lengths */
848   nrecvs2 = nsends;
849   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
850   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
851   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
852   for (i=0; i<nrecvs2; i++) {
853     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
854   }
855 
856   /* send the message lengths */
857   for (i=0; i<nsends2; i++) {
858     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
859   }
860 
861   /* wait on receives of lens */
862   if (nrecvs2) {
863     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
864     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
865     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
866   }
867   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
868 
869   starts3[0] = 0;
870   nt         = 0;
871   for (i=0; i<nrecvs2-1; i++) {
872     starts3[i+1] = starts3[i] + lens2[i];
873     nt          += lens2[i];
874   }
875   if (nrecvs2) nt += lens2[nrecvs2-1];
876 
877   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
878   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
879   for (i=0; i<nrecvs2; i++) {
880     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
881   }
882 
883   /* send the messages */
884   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
885   for (i=0; i<nsends2; i++) {
886     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
887   }
888 
889   /* wait on receives */
890   if (nrecvs2) {
891     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
892     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
893     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
894   }
895   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
896   ierr = PetscFree(nprocs);CHKERRQ(ierr);
897 
898   if (debug) { /* -----------------------------------  */
899     cnt = 0;
900     for (i=0; i<nrecvs2; i++) {
901       nt = recvs2[cnt++];
902       for (j=0; j<nt; j++) {
903         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
904         for (k=0; k<recvs2[cnt+1]; k++) {
905           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
906         }
907         cnt += 2 + recvs2[cnt+1];
908         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
909       }
910     }
911     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
912   } /* -----------------------------------  */
913 
914   /* count number subdomains for each local node */
915   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
916   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
917   cnt  = 0;
918   for (i=0; i<nrecvs2; i++) {
919     nt = recvs2[cnt++];
920     for (j=0; j<nt; j++) {
921       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
922       cnt += 2 + recvs2[cnt+1];
923     }
924   }
925   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
926   *nproc    = nt;
927   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
928   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
929   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
930   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
931   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
932   cnt       = 0;
933   for (i=0; i<size; i++) {
934     if (nprocs[i] > 0) {
935       bprocs[i]        = cnt;
936       (*procs)[cnt]    = i;
937       (*numprocs)[cnt] = nprocs[i];
938       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
939       cnt++;
940     }
941   }
942 
943   /* make the list of subdomains for each nontrivial local node */
944   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
945   cnt  = 0;
946   for (i=0; i<nrecvs2; i++) {
947     nt = recvs2[cnt++];
948     for (j=0; j<nt; j++) {
949       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
950       cnt += 2 + recvs2[cnt+1];
951     }
952   }
953   ierr = PetscFree(bprocs);CHKERRQ(ierr);
954   ierr = PetscFree(recvs2);CHKERRQ(ierr);
955 
956   /* sort the node indexing by their global numbers */
957   nt = *nproc;
958   for (i=0; i<nt; i++) {
959     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
960     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
961     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
962     ierr = PetscFree(tmp);CHKERRQ(ierr);
963   }
964 
965   if (debug) { /* -----------------------------------  */
966     nt = *nproc;
967     for (i=0; i<nt; i++) {
968       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
969       for (j=0; j<(*numprocs)[i]; j++) {
970         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
971       }
972       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
973     }
974     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
975   } /* -----------------------------------  */
976 
977   /* wait on sends */
978   if (nsends2) {
979     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
980     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
981     ierr = PetscFree(send_status);CHKERRQ(ierr);
982   }
983 
984   ierr = PetscFree(starts3);CHKERRQ(ierr);
985   ierr = PetscFree(dest);CHKERRQ(ierr);
986   ierr = PetscFree(send_waits);CHKERRQ(ierr);
987 
988   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
989   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
990   ierr = PetscFree(starts);CHKERRQ(ierr);
991   ierr = PetscFree(starts2);CHKERRQ(ierr);
992   ierr = PetscFree(lens2);CHKERRQ(ierr);
993 
994   ierr = PetscFree(source);CHKERRQ(ierr);
995   ierr = PetscFree(len);CHKERRQ(ierr);
996   ierr = PetscFree(recvs);CHKERRQ(ierr);
997   ierr = PetscFree(nprocs);CHKERRQ(ierr);
998   ierr = PetscFree(sends2);CHKERRQ(ierr);
999 
1000   /* put the information about myself as the first entry in the list */
1001   first_procs    = (*procs)[0];
1002   first_numprocs = (*numprocs)[0];
1003   first_indices  = (*indices)[0];
1004   for (i=0; i<*nproc; i++) {
1005     if ((*procs)[i] == rank) {
1006       (*procs)[0]    = (*procs)[i];
1007       (*numprocs)[0] = (*numprocs)[i];
1008       (*indices)[0]  = (*indices)[i];
1009       (*procs)[i]    = first_procs;
1010       (*numprocs)[i] = first_numprocs;
1011       (*indices)[i]  = first_indices;
1012       break;
1013     }
1014   }
1015   PetscFunctionReturn(0);
1016 }
1017 
1018 #undef __FUNCT__
1019 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1020 /*@C
1021     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1022 
1023     Collective on ISLocalToGlobalMapping
1024 
1025     Input Parameters:
1026 .   mapping - the mapping from local to global indexing
1027 
1028     Output Parameter:
1029 +   nproc - number of processors that are connected to this one
1030 .   proc - neighboring processors
1031 .   numproc - number of indices for each processor
1032 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1033 
1034     Level: advanced
1035 
1036 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1037           ISLocalToGlobalMappingGetInfo()
1038 @*/
1039 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1040 {
1041   PetscErrorCode ierr;
1042   PetscInt       i;
1043 
1044   PetscFunctionBegin;
1045   ierr = PetscFree(*procs);CHKERRQ(ierr);
1046   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1047   if (*indices) {
1048     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1049     for (i=1; i<*nproc; i++) {
1050       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1051     }
1052     ierr = PetscFree(*indices);CHKERRQ(ierr);
1053   }
1054   PetscFunctionReturn(0);
1055 }
1056 
1057 #undef __FUNCT__
1058 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1059 /*@C
1060    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1061 
1062    Not Collective
1063 
1064    Input Arguments:
1065 . ltog - local to global mapping
1066 
1067    Output Arguments:
1068 . array - array of indices
1069 
1070    Level: advanced
1071 
1072 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1073 @*/
1074 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1075 {
1076   PetscFunctionBegin;
1077   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1078   PetscValidPointer(array,2);
1079   if (ltog->bs == 1) {
1080     *array = ltog->indices;
1081   } else {
1082     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1083     const PetscInt *ii;
1084     PetscErrorCode ierr;
1085 
1086     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1087     *array = jj;
1088     k    = 0;
1089     ii   = ltog->indices;
1090     for (i=0; i<n; i++)
1091       for (j=0; j<bs; j++)
1092         jj[k++] = bs*ii[i] + j;
1093   }
1094   PetscFunctionReturn(0);
1095 }
1096 
1097 #undef __FUNCT__
1098 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1099 /*@C
1100    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1101 
1102    Not Collective
1103 
1104    Input Arguments:
1105 + ltog - local to global mapping
1106 - array - array of indices
1107 
1108    Level: advanced
1109 
1110 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1111 @*/
1112 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1113 {
1114   PetscFunctionBegin;
1115   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1116   PetscValidPointer(array,2);
1117   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1118 
1119   if (ltog->bs > 1) {
1120     PetscErrorCode ierr;
1121     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1122   }
1123   PetscFunctionReturn(0);
1124 }
1125 
1126 #undef __FUNCT__
1127 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1128 /*@C
1129    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1130 
1131    Not Collective
1132 
1133    Input Arguments:
1134 . ltog - local to global mapping
1135 
1136    Output Arguments:
1137 . array - array of indices
1138 
1139    Level: advanced
1140 
1141 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1142 @*/
1143 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1144 {
1145   PetscFunctionBegin;
1146   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1147   PetscValidPointer(array,2);
1148   *array = ltog->indices;
1149   PetscFunctionReturn(0);
1150 }
1151 
1152 #undef __FUNCT__
1153 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1154 /*@C
1155    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1156 
1157    Not Collective
1158 
1159    Input Arguments:
1160 + ltog - local to global mapping
1161 - array - array of indices
1162 
1163    Level: advanced
1164 
1165 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1166 @*/
1167 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1168 {
1169   PetscFunctionBegin;
1170   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1171   PetscValidPointer(array,2);
1172   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1173   *array = NULL;
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 #undef __FUNCT__
1178 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1179 /*@C
1180    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1181 
1182    Not Collective
1183 
1184    Input Arguments:
1185 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1186 . n - number of mappings to concatenate
1187 - ltogs - local to global mappings
1188 
1189    Output Arguments:
1190 . ltogcat - new mapping
1191 
1192    Level: advanced
1193 
1194 .seealso: ISLocalToGlobalMappingCreate()
1195 @*/
1196 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1197 {
1198   PetscInt       i,cnt,m,*idx;
1199   PetscErrorCode ierr;
1200 
1201   PetscFunctionBegin;
1202   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1203   if (n > 0) PetscValidPointer(ltogs,3);
1204   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1205   PetscValidPointer(ltogcat,4);
1206   for (cnt=0,i=0; i<n; i++) {
1207     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1208     cnt += m;
1209   }
1210   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1211   for (cnt=0,i=0; i<n; i++) {
1212     const PetscInt *subidx;
1213     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1214     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1215     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1216     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1217     cnt += m;
1218   }
1219   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1220   PetscFunctionReturn(0);
1221 }
1222 
1223 
1224