xref: /petsc/src/vec/is/utils/isltog.c (revision d96308eb0df530d36fc4a99507847908ecff01f3)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->bs*mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   if (!isblock) {
142     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
143     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
144     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
145   } else {
146     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150   }
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   *bs = mapping->bs;
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
232 /*@
233     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
234     ordering and a global parallel ordering.
235 
236     Not Collective, but communicator may have more than one process
237 
238     Input Parameters:
239 +   comm - MPI communicator
240 .   bs - the block size
241 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
242 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
243 -   mode - see PetscCopyMode
244 
245     Output Parameter:
246 .   mapping - new mapping data structure
247 
248     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
249     Level: advanced
250 
251     Concepts: mapping^local to global
252 
253 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
254 @*/
255 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
256 {
257   PetscErrorCode ierr;
258   PetscInt       *in;
259 
260   PetscFunctionBegin;
261   if (n) PetscValidIntPointer(indices,3);
262   PetscValidPointer(mapping,4);
263 
264   *mapping = NULL;
265   ierr = ISInitializePackage();CHKERRQ(ierr);
266 
267   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
268                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
269   (*mapping)->n  = n;
270   (*mapping)->bs = bs;
271   /*
272     Do not create the global to local mapping. This is only created if
273     ISGlobalToLocalMapping() is called
274   */
275   (*mapping)->globals = 0;
276   if (mode == PETSC_COPY_VALUES) {
277     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
278     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
279     (*mapping)->indices = in;
280     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
281   } else if (mode == PETSC_OWN_POINTER) {
282     (*mapping)->indices = (PetscInt*)indices;
283     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
284   }
285   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
286   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
287   PetscFunctionReturn(0);
288 }
289 
290 #undef __FUNCT__
291 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
292 /*@
293    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
294    ordering and a global parallel ordering.
295 
296    Note Collective
297 
298    Input Parameters:
299 .  mapping - mapping data structure
300 
301    Level: advanced
302 
303 .seealso: ISLocalToGlobalMappingCreate()
304 @*/
305 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
306 {
307   PetscErrorCode ierr;
308 
309   PetscFunctionBegin;
310   if (!*mapping) PetscFunctionReturn(0);
311   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
312   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
313   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
314   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
315   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
316   *mapping = 0;
317   PetscFunctionReturn(0);
318 }
319 
320 #undef __FUNCT__
321 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
322 /*@
323     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
324     a new index set using the global numbering defined in an ISLocalToGlobalMapping
325     context.
326 
327     Not collective
328 
329     Input Parameters:
330 +   mapping - mapping between local and global numbering
331 -   is - index set in local numbering
332 
333     Output Parameters:
334 .   newis - index set in global numbering
335 
336     Level: advanced
337 
338     Concepts: mapping^local to global
339 
340 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
341           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
342 @*/
343 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
344 {
345   PetscErrorCode ierr;
346   PetscInt       n,*idxout;
347   const PetscInt *idxin;
348 
349   PetscFunctionBegin;
350   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
351   PetscValidHeaderSpecific(is,IS_CLASSID,2);
352   PetscValidPointer(newis,3);
353 
354   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
355   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
356   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
357   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
358   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
359   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
360   PetscFunctionReturn(0);
361 }
362 
363 #undef __FUNCT__
364 #define __FUNCT__ "ISLocalToGlobalMappingApply"
365 /*@
366    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
367    and converts them to the global numbering.
368 
369    Not collective
370 
371    Input Parameters:
372 +  mapping - the local to global mapping context
373 .  N - number of integers
374 -  in - input indices in local numbering
375 
376    Output Parameter:
377 .  out - indices in global numbering
378 
379    Notes:
380    The in and out array parameters may be identical.
381 
382    Level: advanced
383 
384 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
385           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
386           AOPetscToApplication(), ISGlobalToLocalMappingApply()
387 
388     Concepts: mapping^local to global
389 @*/
390 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
391 {
392   PetscInt       i,bs = mapping->bs,Nmax = bs*mapping->n;
393   const PetscInt *idx = mapping->indices;
394 
395   PetscFunctionBegin;
396   if (bs == 1) {
397     for (i=0; i<N; i++) {
398       if (in[i] < 0) {
399         out[i] = in[i];
400         continue;
401       }
402       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
403       out[i] = idx[in[i]];
404     }
405   } else {
406     for (i=0; i<N; i++) {
407       if (in[i] < 0) {
408         out[i] = in[i];
409         continue;
410       }
411       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
412       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
413     }
414   }
415   PetscFunctionReturn(0);
416 }
417 
418 #undef __FUNCT__
419 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
420 /*@
421    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
422 
423    Not collective
424 
425    Input Parameters:
426 +  mapping - the local to global mapping context
427 .  N - number of integers
428 -  in - input indices in local block numbering
429 
430    Output Parameter:
431 .  out - indices in global block numbering
432 
433    Notes:
434    The in and out array parameters may be identical.
435 
436    Example:
437      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
438      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
439 
440    Level: advanced
441 
442 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
443           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
444           AOPetscToApplication(), ISGlobalToLocalMappingApply()
445 
446     Concepts: mapping^local to global
447 @*/
448 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
449 {
450   PetscInt       i,Nmax = mapping->n;
451   const PetscInt *idx = mapping->indices;
452 
453   PetscFunctionBegin;
454   for (i=0; i<N; i++) {
455     if (in[i] < 0) {
456       out[i] = in[i];
457       continue;
458     }
459     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
460     out[i] = idx[in[i]];
461   }
462   PetscFunctionReturn(0);
463 }
464 
465 /* -----------------------------------------------------------------------------------------*/
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
469 /*
470     Creates the global fields in the ISLocalToGlobalMapping structure
471 */
472 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
473 {
474   PetscErrorCode ierr;
475   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
476 
477   PetscFunctionBegin;
478   end   = 0;
479   start = PETSC_MAX_INT;
480 
481   for (i=0; i<n; i++) {
482     if (idx[i] < 0) continue;
483     if (idx[i] < start) start = idx[i];
484     if (idx[i] > end)   end   = idx[i];
485   }
486   if (start > end) {start = 0; end = -1;}
487   mapping->globalstart = start;
488   mapping->globalend   = end;
489 
490   ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
491   mapping->globals = globals;
492   for (i=0; i<end-start+1; i++) globals[i] = -1;
493   for (i=0; i<n; i++) {
494     if (idx[i] < 0) continue;
495     globals[idx[i] - start] = i;
496   }
497 
498   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
499   PetscFunctionReturn(0);
500 }
501 
502 #undef __FUNCT__
503 #define __FUNCT__ "ISGlobalToLocalMappingApply"
504 /*@
505     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
506     specified with a global numbering.
507 
508     Not collective
509 
510     Input Parameters:
511 +   mapping - mapping between local and global numbering
512 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
513            IS_GTOLM_DROP - drops the indices with no local value from the output list
514 .   n - number of global indices to map
515 -   idx - global indices to map
516 
517     Output Parameters:
518 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
519 -   idxout - local index of each global index, one must pass in an array long enough
520              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
521              idxout == NULL to determine the required length (returned in nout)
522              and then allocate the required space and call ISGlobalToLocalMappingApply()
523              a second time to set the values.
524 
525     Notes:
526     Either nout or idxout may be NULL. idx and idxout may be identical.
527 
528     This is not scalable in memory usage. Each processor requires O(Nglobal) size
529     array to compute these.
530 
531     Level: advanced
532 
533     Developer Note: The manual page states that idx and idxout may be identical but the calling
534        sequence declares idx as const so it cannot be the same as idxout.
535 
536     Concepts: mapping^global to local
537 
538 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
539           ISLocalToGlobalMappingDestroy()
540 @*/
541 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
542                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
543 {
544   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin;
548   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
549   if (!mapping->globals) {
550     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
551   }
552   globals = mapping->globals;
553   start   = mapping->globalstart;
554   end     = mapping->globalend;
555   bs      = mapping->bs;
556 
557   if (type == IS_GTOLM_MASK) {
558     if (idxout) {
559       for (i=0; i<n; i++) {
560         if (idx[i] < 0) idxout[i] = idx[i];
561         else if (idx[i] < bs*start) idxout[i] = -1;
562         else if (idx[i] > bs*end)   idxout[i] = -1;
563         else                        idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
564       }
565     }
566     if (nout) *nout = n;
567   } else {
568     if (idxout) {
569       for (i=0; i<n; i++) {
570         if (idx[i] < 0) continue;
571         if (idx[i] < bs*start) continue;
572         if (idx[i] > bs*end) continue;
573         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
574         if (tmp < 0) continue;
575         idxout[nf++] = tmp;
576       }
577     } else {
578       for (i=0; i<n; i++) {
579         if (idx[i] < 0) continue;
580         if (idx[i] < bs*start) continue;
581         if (idx[i] > bs*end) continue;
582         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
583         if (tmp < 0) continue;
584         nf++;
585       }
586     }
587     if (nout) *nout = nf;
588   }
589   PetscFunctionReturn(0);
590 }
591 
592 #undef __FUNCT__
593 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
594 /*@
595     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
596     specified with a block global numbering.
597 
598     Not collective
599 
600     Input Parameters:
601 +   mapping - mapping between local and global numbering
602 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
603            IS_GTOLM_DROP - drops the indices with no local value from the output list
604 .   n - number of global indices to map
605 -   idx - global indices to map
606 
607     Output Parameters:
608 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
609 -   idxout - local index of each global index, one must pass in an array long enough
610              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
611              idxout == NULL to determine the required length (returned in nout)
612              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
613              a second time to set the values.
614 
615     Notes:
616     Either nout or idxout may be NULL. idx and idxout may be identical.
617 
618     This is not scalable in memory usage. Each processor requires O(Nglobal) size
619     array to compute these.
620 
621     Level: advanced
622 
623     Developer Note: The manual page states that idx and idxout may be identical but the calling
624        sequence declares idx as const so it cannot be the same as idxout.
625 
626     Concepts: mapping^global to local
627 
628 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
629           ISLocalToGlobalMappingDestroy()
630 @*/
631 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
632                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
633 {
634   PetscInt       i,*globals,nf = 0,tmp,start,end;
635   PetscErrorCode ierr;
636 
637   PetscFunctionBegin;
638   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
639   if (!mapping->globals) {
640     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
641   }
642   globals = mapping->globals;
643   start   = mapping->globalstart;
644   end     = mapping->globalend;
645 
646   if (type == IS_GTOLM_MASK) {
647     if (idxout) {
648       for (i=0; i<n; i++) {
649         if (idx[i] < 0) idxout[i] = idx[i];
650         else if (idx[i] < start) idxout[i] = -1;
651         else if (idx[i] > end)   idxout[i] = -1;
652         else                     idxout[i] = globals[idx[i] - start];
653       }
654     }
655     if (nout) *nout = n;
656   } else {
657     if (idxout) {
658       for (i=0; i<n; i++) {
659         if (idx[i] < 0) continue;
660         if (idx[i] < start) continue;
661         if (idx[i] > end) continue;
662         tmp = globals[idx[i] - start];
663         if (tmp < 0) continue;
664         idxout[nf++] = tmp;
665       }
666     } else {
667       for (i=0; i<n; i++) {
668         if (idx[i] < 0) continue;
669         if (idx[i] < start) continue;
670         if (idx[i] > end) continue;
671         tmp = globals[idx[i] - start];
672         if (tmp < 0) continue;
673         nf++;
674       }
675     }
676     if (nout) *nout = nf;
677   }
678   PetscFunctionReturn(0);
679 }
680 
681 #undef __FUNCT__
682 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
683 /*@C
684     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
685      each index shared by more than one processor
686 
687     Collective on ISLocalToGlobalMapping
688 
689     Input Parameters:
690 .   mapping - the mapping from local to global indexing
691 
692     Output Parameter:
693 +   nproc - number of processors that are connected to this one
694 .   proc - neighboring processors
695 .   numproc - number of indices for each subdomain (processor)
696 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
697 
698     Level: advanced
699 
700     Concepts: mapping^local to global
701 
702     Fortran Usage:
703 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
704 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
705           PetscInt indices[nproc][numprocmax],ierr)
706         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
707         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
708 
709 
710 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
711           ISLocalToGlobalMappingRestoreInfo()
712 @*/
713 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
714 {
715   PetscErrorCode ierr;
716   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
717   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
718   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
719   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
720   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
721   PetscInt       first_procs,first_numprocs,*first_indices;
722   MPI_Request    *recv_waits,*send_waits;
723   MPI_Status     recv_status,*send_status,*recv_statuses;
724   MPI_Comm       comm;
725   PetscBool      debug = PETSC_FALSE;
726 
727   PetscFunctionBegin;
728   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
729   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
730   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
731   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
732   if (size == 1) {
733     *nproc         = 0;
734     *procs         = NULL;
735     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
736     (*numprocs)[0] = 0;
737     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
738     (*indices)[0]  = NULL;
739     PetscFunctionReturn(0);
740   }
741 
742   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
743 
744   /*
745     Notes on ISLocalToGlobalMappingGetBlockInfo
746 
747     globally owned node - the nodes that have been assigned to this processor in global
748            numbering, just for this routine.
749 
750     nontrivial globally owned node - node assigned to this processor that is on a subdomain
751            boundary (i.e. is has more than one local owner)
752 
753     locally owned node - node that exists on this processors subdomain
754 
755     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
756            local subdomain
757   */
758   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
759   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
760   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
761 
762   for (i=0; i<n; i++) {
763     if (lindices[i] > max) max = lindices[i];
764   }
765   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
766   Ng++;
767   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
768   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
769   scale  = Ng/size + 1;
770   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
771   rstart = scale*rank;
772 
773   /* determine ownership ranges of global indices */
774   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
775   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
776 
777   /* determine owners of each local node  */
778   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
779   for (i=0; i<n; i++) {
780     proc             = lindices[i]/scale; /* processor that globally owns this index */
781     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
782     owner[i]         = proc;
783     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
784   }
785   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
786   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
787 
788   /* inform other processors of number of messages and max length*/
789   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
790   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
791 
792   /* post receives for owned rows */
793   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
794   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
795   for (i=0; i<nrecvs; i++) {
796     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
797   }
798 
799   /* pack messages containing lists of local nodes to owners */
800   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
801   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
802   starts[0] = 0;
803   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
804   for (i=0; i<n; i++) {
805     sends[starts[owner[i]]++] = lindices[i];
806     sends[starts[owner[i]]++] = i;
807   }
808   ierr = PetscFree(owner);CHKERRQ(ierr);
809   starts[0] = 0;
810   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
811 
812   /* send the messages */
813   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
814   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
815   cnt = 0;
816   for (i=0; i<size; i++) {
817     if (nprocs[2*i]) {
818       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
819       dest[cnt] = i;
820       cnt++;
821     }
822   }
823   ierr = PetscFree(starts);CHKERRQ(ierr);
824 
825   /* wait on receives */
826   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
827   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
828   cnt  = nrecvs;
829   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
830   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
831   while (cnt) {
832     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
833     /* unpack receives into our local space */
834     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
835     source[imdex] = recv_status.MPI_SOURCE;
836     len[imdex]    = len[imdex]/2;
837     /* count how many local owners for each of my global owned indices */
838     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
839     cnt--;
840   }
841   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
842 
843   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
844   nowned  = 0;
845   nownedm = 0;
846   for (i=0; i<ng; i++) {
847     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
848   }
849 
850   /* create single array to contain rank of all local owners of each globally owned index */
851   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
852   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
853   starts[0] = 0;
854   for (i=1; i<ng; i++) {
855     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
856     else starts[i] = starts[i-1];
857   }
858 
859   /* for each nontrival globally owned node list all arriving processors */
860   for (i=0; i<nrecvs; i++) {
861     for (j=0; j<len[i]; j++) {
862       node = recvs[2*i*nmax+2*j]-rstart;
863       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
864     }
865   }
866 
867   if (debug) { /* -----------------------------------  */
868     starts[0] = 0;
869     for (i=1; i<ng; i++) {
870       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
871       else starts[i] = starts[i-1];
872     }
873     for (i=0; i<ng; i++) {
874       if (nownedsenders[i] > 1) {
875         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
876         for (j=0; j<nownedsenders[i]; j++) {
877           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
878         }
879         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
880       }
881     }
882     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
883   } /* -----------------------------------  */
884 
885   /* wait on original sends */
886   if (nsends) {
887     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
888     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
889     ierr = PetscFree(send_status);CHKERRQ(ierr);
890   }
891   ierr = PetscFree(send_waits);CHKERRQ(ierr);
892   ierr = PetscFree(sends);CHKERRQ(ierr);
893   ierr = PetscFree(nprocs);CHKERRQ(ierr);
894 
895   /* pack messages to send back to local owners */
896   starts[0] = 0;
897   for (i=1; i<ng; i++) {
898     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
899     else starts[i] = starts[i-1];
900   }
901   nsends2 = nrecvs;
902   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
903   for (i=0; i<nrecvs; i++) {
904     nprocs[i] = 1;
905     for (j=0; j<len[i]; j++) {
906       node = recvs[2*i*nmax+2*j]-rstart;
907       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
908     }
909   }
910   nt = 0;
911   for (i=0; i<nsends2; i++) nt += nprocs[i];
912 
913   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
914   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
915 
916   starts2[0] = 0;
917   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
918   /*
919      Each message is 1 + nprocs[i] long, and consists of
920        (0) the number of nodes being sent back
921        (1) the local node number,
922        (2) the number of processors sharing it,
923        (3) the processors sharing it
924   */
925   for (i=0; i<nsends2; i++) {
926     cnt = 1;
927     sends2[starts2[i]] = 0;
928     for (j=0; j<len[i]; j++) {
929       node = recvs[2*i*nmax+2*j]-rstart;
930       if (nownedsenders[node] > 1) {
931         sends2[starts2[i]]++;
932         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
933         sends2[starts2[i]+cnt++] = nownedsenders[node];
934         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
935         cnt += nownedsenders[node];
936       }
937     }
938   }
939 
940   /* receive the message lengths */
941   nrecvs2 = nsends;
942   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
943   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
944   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
945   for (i=0; i<nrecvs2; i++) {
946     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
947   }
948 
949   /* send the message lengths */
950   for (i=0; i<nsends2; i++) {
951     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
952   }
953 
954   /* wait on receives of lens */
955   if (nrecvs2) {
956     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
957     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
958     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
959   }
960   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
961 
962   starts3[0] = 0;
963   nt         = 0;
964   for (i=0; i<nrecvs2-1; i++) {
965     starts3[i+1] = starts3[i] + lens2[i];
966     nt          += lens2[i];
967   }
968   if (nrecvs2) nt += lens2[nrecvs2-1];
969 
970   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
971   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
972   for (i=0; i<nrecvs2; i++) {
973     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
974   }
975 
976   /* send the messages */
977   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
978   for (i=0; i<nsends2; i++) {
979     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
980   }
981 
982   /* wait on receives */
983   if (nrecvs2) {
984     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
985     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
986     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
987   }
988   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
989   ierr = PetscFree(nprocs);CHKERRQ(ierr);
990 
991   if (debug) { /* -----------------------------------  */
992     cnt = 0;
993     for (i=0; i<nrecvs2; i++) {
994       nt = recvs2[cnt++];
995       for (j=0; j<nt; j++) {
996         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
997         for (k=0; k<recvs2[cnt+1]; k++) {
998           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
999         }
1000         cnt += 2 + recvs2[cnt+1];
1001         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1002       }
1003     }
1004     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1005   } /* -----------------------------------  */
1006 
1007   /* count number subdomains for each local node */
1008   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1009   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1010   cnt  = 0;
1011   for (i=0; i<nrecvs2; i++) {
1012     nt = recvs2[cnt++];
1013     for (j=0; j<nt; j++) {
1014       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1015       cnt += 2 + recvs2[cnt+1];
1016     }
1017   }
1018   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1019   *nproc    = nt;
1020   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
1021   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
1022   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
1023   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1024   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1025   cnt       = 0;
1026   for (i=0; i<size; i++) {
1027     if (nprocs[i] > 0) {
1028       bprocs[i]        = cnt;
1029       (*procs)[cnt]    = i;
1030       (*numprocs)[cnt] = nprocs[i];
1031       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1032       cnt++;
1033     }
1034   }
1035 
1036   /* make the list of subdomains for each nontrivial local node */
1037   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1038   cnt  = 0;
1039   for (i=0; i<nrecvs2; i++) {
1040     nt = recvs2[cnt++];
1041     for (j=0; j<nt; j++) {
1042       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1043       cnt += 2 + recvs2[cnt+1];
1044     }
1045   }
1046   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1047   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1048 
1049   /* sort the node indexing by their global numbers */
1050   nt = *nproc;
1051   for (i=0; i<nt; i++) {
1052     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
1053     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1054     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1055     ierr = PetscFree(tmp);CHKERRQ(ierr);
1056   }
1057 
1058   if (debug) { /* -----------------------------------  */
1059     nt = *nproc;
1060     for (i=0; i<nt; i++) {
1061       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1062       for (j=0; j<(*numprocs)[i]; j++) {
1063         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1064       }
1065       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1066     }
1067     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1068   } /* -----------------------------------  */
1069 
1070   /* wait on sends */
1071   if (nsends2) {
1072     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1073     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1074     ierr = PetscFree(send_status);CHKERRQ(ierr);
1075   }
1076 
1077   ierr = PetscFree(starts3);CHKERRQ(ierr);
1078   ierr = PetscFree(dest);CHKERRQ(ierr);
1079   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1080 
1081   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1082   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1083   ierr = PetscFree(starts);CHKERRQ(ierr);
1084   ierr = PetscFree(starts2);CHKERRQ(ierr);
1085   ierr = PetscFree(lens2);CHKERRQ(ierr);
1086 
1087   ierr = PetscFree(source);CHKERRQ(ierr);
1088   ierr = PetscFree(len);CHKERRQ(ierr);
1089   ierr = PetscFree(recvs);CHKERRQ(ierr);
1090   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1091   ierr = PetscFree(sends2);CHKERRQ(ierr);
1092 
1093   /* put the information about myself as the first entry in the list */
1094   first_procs    = (*procs)[0];
1095   first_numprocs = (*numprocs)[0];
1096   first_indices  = (*indices)[0];
1097   for (i=0; i<*nproc; i++) {
1098     if ((*procs)[i] == rank) {
1099       (*procs)[0]    = (*procs)[i];
1100       (*numprocs)[0] = (*numprocs)[i];
1101       (*indices)[0]  = (*indices)[i];
1102       (*procs)[i]    = first_procs;
1103       (*numprocs)[i] = first_numprocs;
1104       (*indices)[i]  = first_indices;
1105       break;
1106     }
1107   }
1108   PetscFunctionReturn(0);
1109 }
1110 
1111 #undef __FUNCT__
1112 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1113 /*@C
1114     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1115 
1116     Collective on ISLocalToGlobalMapping
1117 
1118     Input Parameters:
1119 .   mapping - the mapping from local to global indexing
1120 
1121     Output Parameter:
1122 +   nproc - number of processors that are connected to this one
1123 .   proc - neighboring processors
1124 .   numproc - number of indices for each processor
1125 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1126 
1127     Level: advanced
1128 
1129 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1130           ISLocalToGlobalMappingGetInfo()
1131 @*/
1132 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1133 {
1134   PetscErrorCode ierr;
1135   PetscInt       i;
1136 
1137   PetscFunctionBegin;
1138   ierr = PetscFree(*procs);CHKERRQ(ierr);
1139   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1140   if (*indices) {
1141     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1142     for (i=1; i<*nproc; i++) {
1143       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1144     }
1145     ierr = PetscFree(*indices);CHKERRQ(ierr);
1146   }
1147   PetscFunctionReturn(0);
1148 }
1149 
1150 #undef __FUNCT__
1151 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1152 /*@C
1153     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1154      each index shared by more than one processor
1155 
1156     Collective on ISLocalToGlobalMapping
1157 
1158     Input Parameters:
1159 .   mapping - the mapping from local to global indexing
1160 
1161     Output Parameter:
1162 +   nproc - number of processors that are connected to this one
1163 .   proc - neighboring processors
1164 .   numproc - number of indices for each subdomain (processor)
1165 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1166 
1167     Level: advanced
1168 
1169     Concepts: mapping^local to global
1170 
1171     Fortran Usage:
1172 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1173 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1174           PetscInt indices[nproc][numprocmax],ierr)
1175         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1176         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1177 
1178 
1179 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1180           ISLocalToGlobalMappingRestoreInfo()
1181 @*/
1182 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1183 {
1184   PetscErrorCode ierr;
1185   PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;
1186 
1187   PetscFunctionBegin;
1188   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1189   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
1190   ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1191   for (i=0; i<*nproc; i++) {
1192     ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
1193     for (j=0; j<(*numprocs)[i]; j++) {
1194       for (k=0; k<bs; k++) {
1195         (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1196       }
1197     }
1198     (*numprocs)[i] *= bs;
1199   }
1200   if (bindices) {
1201     ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
1202     for (i=1; i<*nproc; i++) {
1203       ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
1204     }
1205     ierr = PetscFree(bindices);CHKERRQ(ierr);
1206   }
1207   PetscFunctionReturn(0);
1208 }
1209 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* The arrays handed out by ISLocalToGlobalMappingGetInfo() have the same ownership layout as
     the block variants (one malloc per entry plus the outer array), so the block restore
     routine performs all of the deallocation. */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1239 
1240 #undef __FUNCT__
1241 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1242 /*@C
1243    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1244 
1245    Not Collective
1246 
1247    Input Arguments:
1248 . ltog - local to global mapping
1249 
1250    Output Arguments:
1251 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1252 
1253    Level: advanced
1254 
1255    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1256 
1257 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1258 @*/
1259 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1260 {
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1263   PetscValidPointer(array,2);
1264   if (ltog->bs == 1) {
1265     *array = ltog->indices;
1266   } else {
1267     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1268     const PetscInt *ii;
1269     PetscErrorCode ierr;
1270 
1271     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1272     *array = jj;
1273     k    = 0;
1274     ii   = ltog->indices;
1275     for (i=0; i<n; i++)
1276       for (j=0; j<bs; j++)
1277         jj[k++] = bs*ii[i] + j;
1278   }
1279   PetscFunctionReturn(0);
1280 }
1281 
1282 #undef __FUNCT__
1283 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1284 /*@C
1285    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1286 
1287    Not Collective
1288 
1289    Input Arguments:
1290 + ltog - local to global mapping
1291 - array - array of indices
1292 
1293    Level: advanced
1294 
1295 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1296 @*/
1297 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1298 {
1299   PetscFunctionBegin;
1300   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1301   PetscValidPointer(array,2);
1302   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1303 
1304   if (ltog->bs > 1) {
1305     PetscErrorCode ierr;
1306     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1307   }
1308   PetscFunctionReturn(0);
1309 }
1310 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices; this is a borrowed reference to the mapping's internal array and
          must be returned with ISLocalToGlobalMappingRestoreBlockIndices()

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  *array = ltog->indices; /* no copy: expose the internal block-index array directly */
  PetscFunctionReturn(0);
}
1336 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+ ltog - local to global mapping
- array - array of indices obtained from ISLocalToGlobalMappingGetBlockIndices()

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the caller received a borrowed reference to ltog->indices; anything else indicates misuse */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL; /* guard against accidental use after restore */
  PetscFunctionReturn(0);
}
1361 
1362 #undef __FUNCT__
1363 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1364 /*@C
1365    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1366 
1367    Not Collective
1368 
1369    Input Arguments:
1370 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1371 . n - number of mappings to concatenate
1372 - ltogs - local to global mappings
1373 
1374    Output Arguments:
1375 . ltogcat - new mapping
1376 
1377    Note: this currently always returns a mapping with block size of 1
1378 
1379    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1380 
1381    Level: advanced
1382 
1383 .seealso: ISLocalToGlobalMappingCreate()
1384 @*/
1385 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1386 {
1387   PetscInt       i,cnt,m,*idx;
1388   PetscErrorCode ierr;
1389 
1390   PetscFunctionBegin;
1391   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1392   if (n > 0) PetscValidPointer(ltogs,3);
1393   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1394   PetscValidPointer(ltogcat,4);
1395   for (cnt=0,i=0; i<n; i++) {
1396     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1397     cnt += m;
1398   }
1399   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1400   for (cnt=0,i=0; i<n; i++) {
1401     const PetscInt *subidx;
1402     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1403     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1404     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1405     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1406     cnt += m;
1407   }
1408   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1409   PetscFunctionReturn(0);
1410 }
1411 
1412 
1413