xref: /petsc/src/vec/is/utils/isltog.c (revision af0996ce37bc06907c37d8d91773840993d61e62)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->bs*mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
77 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
78 {
79   PetscInt       i;
80   PetscMPIInt    rank;
81   PetscBool      iascii;
82   PetscErrorCode ierr;
83 
84   PetscFunctionBegin;
85   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
86   if (!viewer) {
87     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
88   }
89   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
90 
91   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
92   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
93   if (iascii) {
94     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
95     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
96     for (i=0; i<mapping->n; i++) {
97       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
98     }
99     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
100     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
101   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
102   PetscFunctionReturn(0);
103 }
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   if (!isblock) {
142     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
143     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
144     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
145   } else {
146     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150   }
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
162     Input Parameter:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
206     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
207     ordering and a global parallel ordering.
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
227   *bs = mapping->bs;
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
233 /*@
234     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
235     ordering and a global parallel ordering.
236 
237     Not Collective, but communicator may have more than one process
238 
239     Input Parameters:
240 +   comm - MPI communicator
241 .   bs - the block size
242 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
243 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
244 -   mode - see PetscCopyMode
245 
246     Output Parameter:
247 .   mapping - new mapping data structure
248 
249     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
250     Level: advanced
251 
252     Concepts: mapping^local to global
253 
254 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
255 @*/
256 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
257 {
258   PetscErrorCode ierr;
259   PetscInt       *in;
260 
261   PetscFunctionBegin;
262   if (n) PetscValidIntPointer(indices,3);
263   PetscValidPointer(mapping,4);
264 
265   *mapping = NULL;
266   ierr = ISInitializePackage();CHKERRQ(ierr);
267 
268   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
269                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
270   (*mapping)->n  = n;
271   (*mapping)->bs = bs;
272   /*
273     Do not create the global to local mapping. This is only created if
274     ISGlobalToLocalMapping() is called
275   */
276   (*mapping)->globals = 0;
277   if (mode == PETSC_COPY_VALUES) {
278     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
279     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
280     (*mapping)->indices = in;
281     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
282   } else if (mode == PETSC_OWN_POINTER) {
283     (*mapping)->indices = (PetscInt*)indices;
284     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
285   }
286   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
287   ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
288   PetscFunctionReturn(0);
289 }
290 
291 #undef __FUNCT__
292 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
293 /*@
294    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
295    ordering and a global parallel ordering.
296 
297    Note Collective
298 
299    Input Parameters:
300 .  mapping - mapping data structure
301 
302    Level: advanced
303 
304 .seealso: ISLocalToGlobalMappingCreate()
305 @*/
306 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
307 {
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*mapping) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
313   if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
314   ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
315   ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
316   ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
317   *mapping = 0;
318   PetscFunctionReturn(0);
319 }
320 
321 #undef __FUNCT__
322 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
323 /*@
324     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
325     a new index set using the global numbering defined in an ISLocalToGlobalMapping
326     context.
327 
328     Not collective
329 
330     Input Parameters:
331 +   mapping - mapping between local and global numbering
332 -   is - index set in local numbering
333 
334     Output Parameters:
335 .   newis - index set in global numbering
336 
337     Level: advanced
338 
339     Concepts: mapping^local to global
340 
341 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
342           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
343 @*/
344 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
345 {
346   PetscErrorCode ierr;
347   PetscInt       n,*idxout;
348   const PetscInt *idxin;
349 
350   PetscFunctionBegin;
351   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
352   PetscValidHeaderSpecific(is,IS_CLASSID,2);
353   PetscValidPointer(newis,3);
354 
355   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
356   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
357   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
358   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
359   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
360   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
361   PetscFunctionReturn(0);
362 }
363 
364 #undef __FUNCT__
365 #define __FUNCT__ "ISLocalToGlobalMappingApply"
366 /*@
367    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
368    and converts them to the global numbering.
369 
370    Not collective
371 
372    Input Parameters:
373 +  mapping - the local to global mapping context
374 .  N - number of integers
375 -  in - input indices in local numbering
376 
377    Output Parameter:
378 .  out - indices in global numbering
379 
380    Notes:
381    The in and out array parameters may be identical.
382 
383    Level: advanced
384 
385 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
386           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
387           AOPetscToApplication(), ISGlobalToLocalMappingApply()
388 
389     Concepts: mapping^local to global
390 @*/
391 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
392 {
393   PetscInt i,bs,Nmax;
394 
395   PetscFunctionBegin;
396   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
397   bs   = mapping->bs;
398   Nmax = bs*mapping->n;
399   if (bs == 1) {
400     const PetscInt *idx = mapping->indices;
401     for (i=0; i<N; i++) {
402       if (in[i] < 0) {
403         out[i] = in[i];
404         continue;
405       }
406       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
407       out[i] = idx[in[i]];
408     }
409   } else {
410     const PetscInt *idx = mapping->indices;
411     for (i=0; i<N; i++) {
412       if (in[i] < 0) {
413         out[i] = in[i];
414         continue;
415       }
416       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
417       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
418     }
419   }
420   PetscFunctionReturn(0);
421 }
422 
423 #undef __FUNCT__
424 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
425 /*@
426    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
427 
428    Not collective
429 
430    Input Parameters:
431 +  mapping - the local to global mapping context
432 .  N - number of integers
433 -  in - input indices in local block numbering
434 
435    Output Parameter:
436 .  out - indices in global block numbering
437 
438    Notes:
439    The in and out array parameters may be identical.
440 
441    Example:
442      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
443      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
444 
445    Level: advanced
446 
447 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
448           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
449           AOPetscToApplication(), ISGlobalToLocalMappingApply()
450 
451     Concepts: mapping^local to global
452 @*/
453 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
454 {
455 
456   PetscFunctionBegin;
457   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
458   {
459     PetscInt i,Nmax = mapping->n;
460     const PetscInt *idx = mapping->indices;
461 
462     for (i=0; i<N; i++) {
463       if (in[i] < 0) {
464         out[i] = in[i];
465         continue;
466       }
467       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
468       out[i] = idx[in[i]];
469     }
470   }
471   PetscFunctionReturn(0);
472 }
473 
474 /* -----------------------------------------------------------------------------------------*/
475 
476 #undef __FUNCT__
477 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
478 /*
479     Creates the global fields in the ISLocalToGlobalMapping structure
480 */
481 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
482 {
483   PetscErrorCode ierr;
484   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
485 
486   PetscFunctionBegin;
487   end   = 0;
488   start = PETSC_MAX_INT;
489 
490   for (i=0; i<n; i++) {
491     if (idx[i] < 0) continue;
492     if (idx[i] < start) start = idx[i];
493     if (idx[i] > end)   end   = idx[i];
494   }
495   if (start > end) {start = 0; end = -1;}
496   mapping->globalstart = start;
497   mapping->globalend   = end;
498 
499   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
500   mapping->globals = globals;
501   for (i=0; i<end-start+1; i++) globals[i] = -1;
502   for (i=0; i<n; i++) {
503     if (idx[i] < 0) continue;
504     globals[idx[i] - start] = i;
505   }
506 
507   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
508   PetscFunctionReturn(0);
509 }
510 
511 #undef __FUNCT__
512 #define __FUNCT__ "ISGlobalToLocalMappingApply"
513 /*@
514     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
515     specified with a global numbering.
516 
517     Not collective
518 
519     Input Parameters:
520 +   mapping - mapping between local and global numbering
521 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
522            IS_GTOLM_DROP - drops the indices with no local value from the output list
523 .   n - number of global indices to map
524 -   idx - global indices to map
525 
526     Output Parameters:
527 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
528 -   idxout - local index of each global index, one must pass in an array long enough
529              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
530              idxout == NULL to determine the required length (returned in nout)
531              and then allocate the required space and call ISGlobalToLocalMappingApply()
532              a second time to set the values.
533 
534     Notes:
535     Either nout or idxout may be NULL. idx and idxout may be identical.
536 
537     This is not scalable in memory usage. Each processor requires O(Nglobal) size
538     array to compute these.
539 
540     Level: advanced
541 
542     Developer Note: The manual page states that idx and idxout may be identical but the calling
543        sequence declares idx as const so it cannot be the same as idxout.
544 
545     Concepts: mapping^global to local
546 
547 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
548           ISLocalToGlobalMappingDestroy()
549 @*/
550 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
551                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
552 {
553   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
558   if (!mapping->globals) {
559     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
560   }
561   globals = mapping->globals;
562   start   = mapping->globalstart;
563   end     = mapping->globalend;
564   bs      = mapping->bs;
565 
566   if (type == IS_GTOLM_MASK) {
567     if (idxout) {
568       for (i=0; i<n; i++) {
569         if (idx[i] < 0)                   idxout[i] = idx[i];
570         else if (idx[i] < bs*start)       idxout[i] = -1;
571         else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
572         else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
573       }
574     }
575     if (nout) *nout = n;
576   } else {
577     if (idxout) {
578       for (i=0; i<n; i++) {
579         if (idx[i] < 0) continue;
580         if (idx[i] < bs*start) continue;
581         if (idx[i] > bs*(end+1)-1) continue;
582         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
583         if (tmp < 0) continue;
584         idxout[nf++] = tmp;
585       }
586     } else {
587       for (i=0; i<n; i++) {
588         if (idx[i] < 0) continue;
589         if (idx[i] < bs*start) continue;
590         if (idx[i] > bs*(end+1)-1) continue;
591         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
592         if (tmp < 0) continue;
593         nf++;
594       }
595     }
596     if (nout) *nout = nf;
597   }
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
603 /*@
604     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
605     a new index set using the local numbering defined in an ISLocalToGlobalMapping
606     context.
607 
608     Not collective
609 
610     Input Parameters:
611 +   mapping - mapping between local and global numbering
612 -   is - index set in global numbering
613 
614     Output Parameters:
615 .   newis - index set in local numbering
616 
617     Level: advanced
618 
619     Concepts: mapping^local to global
620 
621 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
622           ISLocalToGlobalMappingDestroy()
623 @*/
624 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
625 {
626   PetscErrorCode ierr;
627   PetscInt       n,nout,*idxout;
628   const PetscInt *idxin;
629 
630   PetscFunctionBegin;
631   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
632   PetscValidHeaderSpecific(is,IS_CLASSID,3);
633   PetscValidPointer(newis,4);
634 
635   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
636   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
637   if (type == IS_GTOLM_MASK) {
638     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
639   } else {
640     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
641     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
642   }
643   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
644   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
645   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
646   PetscFunctionReturn(0);
647 }
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
651 /*@
652     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
653     specified with a block global numbering.
654 
655     Not collective
656 
657     Input Parameters:
658 +   mapping - mapping between local and global numbering
659 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
660            IS_GTOLM_DROP - drops the indices with no local value from the output list
661 .   n - number of global indices to map
662 -   idx - global indices to map
663 
664     Output Parameters:
665 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
666 -   idxout - local index of each global index, one must pass in an array long enough
667              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
668              idxout == NULL to determine the required length (returned in nout)
669              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
670              a second time to set the values.
671 
672     Notes:
673     Either nout or idxout may be NULL. idx and idxout may be identical.
674 
675     This is not scalable in memory usage. Each processor requires O(Nglobal) size
676     array to compute these.
677 
678     Level: advanced
679 
680     Developer Note: The manual page states that idx and idxout may be identical but the calling
681        sequence declares idx as const so it cannot be the same as idxout.
682 
683     Concepts: mapping^global to local
684 
685 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
686           ISLocalToGlobalMappingDestroy()
687 @*/
688 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
689                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
690 {
691   PetscInt       i,*globals,nf = 0,tmp,start,end;
692   PetscErrorCode ierr;
693 
694   PetscFunctionBegin;
695   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
696   if (!mapping->globals) {
697     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
698   }
699   globals = mapping->globals;
700   start   = mapping->globalstart;
701   end     = mapping->globalend;
702 
703   if (type == IS_GTOLM_MASK) {
704     if (idxout) {
705       for (i=0; i<n; i++) {
706         if (idx[i] < 0) idxout[i] = idx[i];
707         else if (idx[i] < start) idxout[i] = -1;
708         else if (idx[i] > end)   idxout[i] = -1;
709         else                     idxout[i] = globals[idx[i] - start];
710       }
711     }
712     if (nout) *nout = n;
713   } else {
714     if (idxout) {
715       for (i=0; i<n; i++) {
716         if (idx[i] < 0) continue;
717         if (idx[i] < start) continue;
718         if (idx[i] > end) continue;
719         tmp = globals[idx[i] - start];
720         if (tmp < 0) continue;
721         idxout[nf++] = tmp;
722       }
723     } else {
724       for (i=0; i<n; i++) {
725         if (idx[i] < 0) continue;
726         if (idx[i] < start) continue;
727         if (idx[i] > end) continue;
728         tmp = globals[idx[i] - start];
729         if (tmp < 0) continue;
730         nf++;
731       }
732     }
733     if (nout) *nout = nf;
734   }
735   PetscFunctionReturn(0);
736 }
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
740 /*@C
741     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
742      each index shared by more than one processor
743 
744     Collective on ISLocalToGlobalMapping
745 
746     Input Parameters:
747 .   mapping - the mapping from local to global indexing
748 
749     Output Parameter:
750 +   nproc - number of processors that are connected to this one
751 .   proc - neighboring processors
752 .   numproc - number of indices for each subdomain (processor)
753 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
754 
755     Level: advanced
756 
757     Concepts: mapping^local to global
758 
759     Fortran Usage:
760 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
761 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
762           PetscInt indices[nproc][numprocmax],ierr)
763         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
764         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
765 
766 
767 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
768           ISLocalToGlobalMappingRestoreInfo()
769 @*/
770 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
771 {
772   PetscErrorCode ierr;
773   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
774   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
775   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
776   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
777   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
778   PetscInt       first_procs,first_numprocs,*first_indices;
779   MPI_Request    *recv_waits,*send_waits;
780   MPI_Status     recv_status,*send_status,*recv_statuses;
781   MPI_Comm       comm;
782   PetscBool      debug = PETSC_FALSE;
783 
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
786   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
787   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
788   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
789   if (size == 1) {
790     *nproc         = 0;
791     *procs         = NULL;
792     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
793     (*numprocs)[0] = 0;
794     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
795     (*indices)[0]  = NULL;
796     PetscFunctionReturn(0);
797   }
798 
799   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
800 
801   /*
802     Notes on ISLocalToGlobalMappingGetBlockInfo
803 
804     globally owned node - the nodes that have been assigned to this processor in global
805            numbering, just for this routine.
806 
807     nontrivial globally owned node - node assigned to this processor that is on a subdomain
808            boundary (i.e. is has more than one local owner)
809 
810     locally owned node - node that exists on this processors subdomain
811 
812     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
813            local subdomain
814   */
815   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
816   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
817   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
818 
819   for (i=0; i<n; i++) {
820     if (lindices[i] > max) max = lindices[i];
821   }
822   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
823   Ng++;
824   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
825   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
826   scale  = Ng/size + 1;
827   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
828   rstart = scale*rank;
829 
830   /* determine ownership ranges of global indices */
831   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
832   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
833 
834   /* determine owners of each local node  */
835   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
836   for (i=0; i<n; i++) {
837     proc             = lindices[i]/scale; /* processor that globally owns this index */
838     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
839     owner[i]         = proc;
840     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
841   }
842   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
843   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
844 
845   /* inform other processors of number of messages and max length*/
846   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
847   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
848 
849   /* post receives for owned rows */
850   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
851   ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
852   for (i=0; i<nrecvs; i++) {
853     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
854   }
855 
856   /* pack messages containing lists of local nodes to owners */
857   ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
858   ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
859   starts[0] = 0;
860   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
861   for (i=0; i<n; i++) {
862     sends[starts[owner[i]]++] = lindices[i];
863     sends[starts[owner[i]]++] = i;
864   }
865   ierr = PetscFree(owner);CHKERRQ(ierr);
866   starts[0] = 0;
867   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
868 
869   /* send the messages */
870   ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
871   ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
872   cnt = 0;
873   for (i=0; i<size; i++) {
874     if (nprocs[2*i]) {
875       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
876       dest[cnt] = i;
877       cnt++;
878     }
879   }
880   ierr = PetscFree(starts);CHKERRQ(ierr);
881 
882   /* wait on receives */
883   ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
884   ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
885   cnt  = nrecvs;
886   ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
887   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
888   while (cnt) {
889     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
890     /* unpack receives into our local space */
891     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
892     source[imdex] = recv_status.MPI_SOURCE;
893     len[imdex]    = len[imdex]/2;
894     /* count how many local owners for each of my global owned indices */
895     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
896     cnt--;
897   }
898   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
899 
900   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
901   nowned  = 0;
902   nownedm = 0;
903   for (i=0; i<ng; i++) {
904     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
905   }
906 
907   /* create single array to contain rank of all local owners of each globally owned index */
908   ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
909   ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
910   starts[0] = 0;
911   for (i=1; i<ng; i++) {
912     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
913     else starts[i] = starts[i-1];
914   }
915 
  /* for each nontrivial globally owned node list all arriving processors */
917   for (i=0; i<nrecvs; i++) {
918     for (j=0; j<len[i]; j++) {
919       node = recvs[2*i*nmax+2*j]-rstart;
920       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
921     }
922   }
923 
924   if (debug) { /* -----------------------------------  */
925     starts[0] = 0;
926     for (i=1; i<ng; i++) {
927       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
928       else starts[i] = starts[i-1];
929     }
930     for (i=0; i<ng; i++) {
931       if (nownedsenders[i] > 1) {
932         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
933         for (j=0; j<nownedsenders[i]; j++) {
934           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
935         }
936         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
937       }
938     }
939     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
940   } /* -----------------------------------  */
941 
942   /* wait on original sends */
943   if (nsends) {
944     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
945     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
946     ierr = PetscFree(send_status);CHKERRQ(ierr);
947   }
948   ierr = PetscFree(send_waits);CHKERRQ(ierr);
949   ierr = PetscFree(sends);CHKERRQ(ierr);
950   ierr = PetscFree(nprocs);CHKERRQ(ierr);
951 
952   /* pack messages to send back to local owners */
953   starts[0] = 0;
954   for (i=1; i<ng; i++) {
955     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
956     else starts[i] = starts[i-1];
957   }
958   nsends2 = nrecvs;
959   ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
960   for (i=0; i<nrecvs; i++) {
961     nprocs[i] = 1;
962     for (j=0; j<len[i]; j++) {
963       node = recvs[2*i*nmax+2*j]-rstart;
964       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
965     }
966   }
967   nt = 0;
968   for (i=0; i<nsends2; i++) nt += nprocs[i];
969 
970   ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
971   ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);
972 
973   starts2[0] = 0;
974   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
975   /*
976      Each message is 1 + nprocs[i] long, and consists of
977        (0) the number of nodes being sent back
978        (1) the local node number,
979        (2) the number of processors sharing it,
980        (3) the processors sharing it
981   */
982   for (i=0; i<nsends2; i++) {
983     cnt = 1;
984     sends2[starts2[i]] = 0;
985     for (j=0; j<len[i]; j++) {
986       node = recvs[2*i*nmax+2*j]-rstart;
987       if (nownedsenders[node] > 1) {
988         sends2[starts2[i]]++;
989         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
990         sends2[starts2[i]+cnt++] = nownedsenders[node];
991         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
992         cnt += nownedsenders[node];
993       }
994     }
995   }
996 
997   /* receive the message lengths */
998   nrecvs2 = nsends;
999   ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
1000   ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
1001   ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
1002   for (i=0; i<nrecvs2; i++) {
1003     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
1004   }
1005 
1006   /* send the message lengths */
1007   for (i=0; i<nsends2; i++) {
1008     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
1009   }
1010 
1011   /* wait on receives of lens */
1012   if (nrecvs2) {
1013     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
1014     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
1015     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
1016   }
1017   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1018 
1019   starts3[0] = 0;
1020   nt         = 0;
1021   for (i=0; i<nrecvs2-1; i++) {
1022     starts3[i+1] = starts3[i] + lens2[i];
1023     nt          += lens2[i];
1024   }
1025   if (nrecvs2) nt += lens2[nrecvs2-1];
1026 
1027   ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
1028   ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
1029   for (i=0; i<nrecvs2; i++) {
1030     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
1031   }
1032 
1033   /* send the messages */
1034   ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
1035   for (i=0; i<nsends2; i++) {
1036     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
1037   }
1038 
1039   /* wait on receives */
1040   if (nrecvs2) {
1041     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
1042     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
1043     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
1044   }
1045   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1046   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1047 
1048   if (debug) { /* -----------------------------------  */
1049     cnt = 0;
1050     for (i=0; i<nrecvs2; i++) {
1051       nt = recvs2[cnt++];
1052       for (j=0; j<nt; j++) {
1053         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
1054         for (k=0; k<recvs2[cnt+1]; k++) {
1055           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
1056         }
1057         cnt += 2 + recvs2[cnt+1];
1058         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1059       }
1060     }
1061     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1062   } /* -----------------------------------  */
1063 
1064   /* count number subdomains for each local node */
1065   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1066   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1067   cnt  = 0;
1068   for (i=0; i<nrecvs2; i++) {
1069     nt = recvs2[cnt++];
1070     for (j=0; j<nt; j++) {
1071       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1072       cnt += 2 + recvs2[cnt+1];
1073     }
1074   }
1075   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1076   *nproc    = nt;
1077   ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
1078   ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
1079   ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
1080   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1081   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1082   cnt       = 0;
1083   for (i=0; i<size; i++) {
1084     if (nprocs[i] > 0) {
1085       bprocs[i]        = cnt;
1086       (*procs)[cnt]    = i;
1087       (*numprocs)[cnt] = nprocs[i];
1088       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1089       cnt++;
1090     }
1091   }
1092 
1093   /* make the list of subdomains for each nontrivial local node */
1094   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1095   cnt  = 0;
1096   for (i=0; i<nrecvs2; i++) {
1097     nt = recvs2[cnt++];
1098     for (j=0; j<nt; j++) {
1099       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1100       cnt += 2 + recvs2[cnt+1];
1101     }
1102   }
1103   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1104   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1105 
1106   /* sort the node indexing by their global numbers */
1107   nt = *nproc;
1108   for (i=0; i<nt; i++) {
1109     ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
1110     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1111     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1112     ierr = PetscFree(tmp);CHKERRQ(ierr);
1113   }
1114 
1115   if (debug) { /* -----------------------------------  */
1116     nt = *nproc;
1117     for (i=0; i<nt; i++) {
1118       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1119       for (j=0; j<(*numprocs)[i]; j++) {
1120         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1121       }
1122       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1123     }
1124     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1125   } /* -----------------------------------  */
1126 
1127   /* wait on sends */
1128   if (nsends2) {
1129     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1130     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1131     ierr = PetscFree(send_status);CHKERRQ(ierr);
1132   }
1133 
1134   ierr = PetscFree(starts3);CHKERRQ(ierr);
1135   ierr = PetscFree(dest);CHKERRQ(ierr);
1136   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1137 
1138   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1139   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1140   ierr = PetscFree(starts);CHKERRQ(ierr);
1141   ierr = PetscFree(starts2);CHKERRQ(ierr);
1142   ierr = PetscFree(lens2);CHKERRQ(ierr);
1143 
1144   ierr = PetscFree(source);CHKERRQ(ierr);
1145   ierr = PetscFree(len);CHKERRQ(ierr);
1146   ierr = PetscFree(recvs);CHKERRQ(ierr);
1147   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1148   ierr = PetscFree(sends2);CHKERRQ(ierr);
1149 
1150   /* put the information about myself as the first entry in the list */
1151   first_procs    = (*procs)[0];
1152   first_numprocs = (*numprocs)[0];
1153   first_indices  = (*indices)[0];
1154   for (i=0; i<*nproc; i++) {
1155     if ((*procs)[i] == rank) {
1156       (*procs)[0]    = (*procs)[i];
1157       (*numprocs)[0] = (*numprocs)[i];
1158       (*indices)[0]  = (*indices)[i];
1159       (*procs)[i]    = first_procs;
1160       (*numprocs)[i] = first_numprocs;
1161       (*indices)[i]  = first_indices;
1162       break;
1163     }
1164   }
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 #undef __FUNCT__
1169 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1170 /*@C
1171     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1172 
1173     Collective on ISLocalToGlobalMapping
1174 
1175     Input Parameters:
1176 .   mapping - the mapping from local to global indexing
1177 
1178     Output Parameter:
1179 +   nproc - number of processors that are connected to this one
1180 .   proc - neighboring processors
1181 .   numproc - number of indices for each processor
1182 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1183 
1184     Level: advanced
1185 
1186 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1187           ISLocalToGlobalMappingGetInfo()
1188 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* release the arrays allocated by ISLocalToGlobalMappingGetBlockInfo() */
  ierr = PetscFree(*procs);CHKERRQ(ierr);
  ierr = PetscFree(*numprocs);CHKERRQ(ierr);
  if (*indices) {
    /* slot 0 is freed unconditionally (before the loop) because on a one-process
       communicator GetBlockInfo() sets *nproc to 0 yet still allocates a one-entry
       *indices array whose single slot holds NULL; PetscFree(NULL) is a no-op */
    ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
    for (i=1; i<*nproc; i++) {
      ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(*indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1207 
1208 #undef __FUNCT__
1209 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1210 /*@C
1211     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1212      each index shared by more than one processor
1213 
1214     Collective on ISLocalToGlobalMapping
1215 
1216     Input Parameters:
1217 .   mapping - the mapping from local to global indexing
1218 
1219     Output Parameter:
1220 +   nproc - number of processors that are connected to this one
1221 .   proc - neighboring processors
1222 .   numproc - number of indices for each subdomain (processor)
1223 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1224 
1225     Level: advanced
1226 
1227     Concepts: mapping^local to global
1228 
1229     Fortran Usage:
1230 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1231 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1232           PetscInt indices[nproc][numprocmax],ierr)
1233         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1234         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1235 
1236 
1237 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1238           ISLocalToGlobalMappingRestoreInfo()
1239 @*/
1240 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1241 {
1242   PetscErrorCode ierr;
1243   PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;
1244 
1245   PetscFunctionBegin;
1246   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1247   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
1248   ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1249   for (i=0; i<*nproc; i++) {
1250     ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
1251     for (j=0; j<(*numprocs)[i]; j++) {
1252       for (k=0; k<bs; k++) {
1253         (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1254       }
1255     }
1256     (*numprocs)[i] *= bs;
1257   }
1258   if (bindices) {
1259     ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
1260     for (i=1; i<*nproc; i++) {
1261       ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
1262     }
1263     ierr = PetscFree(bindices);CHKERRQ(ierr);
1264   }
1265   PetscFunctionReturn(0);
1266 }
1267 
1268 #undef __FUNCT__
1269 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1270 /*@C
1271     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1272 
1273     Collective on ISLocalToGlobalMapping
1274 
1275     Input Parameters:
1276 .   mapping - the mapping from local to global indexing
1277 
1278     Output Parameter:
1279 +   nproc - number of processors that are connected to this one
1280 .   proc - neighboring processors
1281 .   numproc - number of indices for each processor
1282 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1283 
1284     Level: advanced
1285 
1286 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1287           ISLocalToGlobalMappingGetInfo()
1288 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the arrays returned by ISLocalToGlobalMappingGetInfo() have the same structure
     as the block variant's, so the block restore routine frees them correctly */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1297 
1298 #undef __FUNCT__
1299 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1300 /*@C
1301    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1302 
1303    Not Collective
1304 
1305    Input Arguments:
1306 . ltog - local to global mapping
1307 
1308    Output Arguments:
1309 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1310 
1311    Level: advanced
1312 
   Notes: ISLocalToGlobalMappingGetSize() returns the length of this array
1314 
1315 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1316 @*/
1317 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1318 {
1319   PetscFunctionBegin;
1320   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1321   PetscValidPointer(array,2);
1322   if (ltog->bs == 1) {
1323     *array = ltog->indices;
1324   } else {
1325     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1326     const PetscInt *ii;
1327     PetscErrorCode ierr;
1328 
1329     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1330     *array = jj;
1331     k    = 0;
1332     ii   = ltog->indices;
1333     for (i=0; i<n; i++)
1334       for (j=0; j<bs; j++)
1335         jj[k++] = bs*ii[i] + j;
1336   }
1337   PetscFunctionReturn(0);
1338 }
1339 
1340 #undef __FUNCT__
1341 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1342 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1344 
1345    Not Collective
1346 
1347    Input Arguments:
1348 + ltog - local to global mapping
1349 - array - array of indices
1350 
1351    Level: advanced
1352 
1353 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1354 @*/
1355 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1356 {
1357   PetscFunctionBegin;
1358   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1359   PetscValidPointer(array,2);
1360   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1361 
1362   if (ltog->bs > 1) {
1363     PetscErrorCode ierr;
1364     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1365   }
1366   PetscFunctionReturn(0);
1367 }
1368 
1369 #undef __FUNCT__
1370 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1371 /*@C
1372    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1373 
1374    Not Collective
1375 
1376    Input Arguments:
1377 . ltog - local to global mapping
1378 
1379    Output Arguments:
1380 . array - array of indices
1381 
1382    Level: advanced
1383 
1384 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1385 @*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* borrowed read-only pointer to the mapping's internal block-index array; it must be
     returned with ISLocalToGlobalMappingRestoreBlockIndices(), never freed by the caller */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1397 /*@C
1398    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1399 
1400    Not Collective
1401 
1402    Input Arguments:
1403 + ltog - local to global mapping
1404 - array - array of indices
1405 
1406    Level: advanced
1407 
1408 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1409 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the pointer was borrowed from the mapping, so it must match exactly; nothing is freed */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* clear the caller's copy to prevent accidental use after restore */
  *array = NULL;
  PetscFunctionReturn(0);
}
1419 
1420 #undef __FUNCT__
1421 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1422 /*@C
1423    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1424 
1425    Not Collective
1426 
1427    Input Arguments:
1428 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1429 . n - number of mappings to concatenate
1430 - ltogs - local to global mappings
1431 
1432    Output Arguments:
1433 . ltogcat - new mapping
1434 
1435    Note: this currently always returns a mapping with block size of 1
1436 
   Developer Note: If all the input mappings have the same block size we could easily handle that as a special case
1438 
1439    Level: advanced
1440 
1441 .seealso: ISLocalToGlobalMappingCreate()
1442 @*/
1443 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1444 {
1445   PetscInt       i,cnt,m,*idx;
1446   PetscErrorCode ierr;
1447 
1448   PetscFunctionBegin;
1449   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1450   if (n > 0) PetscValidPointer(ltogs,3);
1451   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1452   PetscValidPointer(ltogcat,4);
1453   for (cnt=0,i=0; i<n; i++) {
1454     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1455     cnt += m;
1456   }
1457   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1458   for (cnt=0,i=0; i<n; i++) {
1459     const PetscInt *subidx;
1460     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1461     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1462     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1463     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1464     cnt += m;
1465   }
1466   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1467   PetscFunctionReturn(0);
1468 }
1469 
1470 
1471