xref: /petsc/src/vec/is/utils/isltog.c (revision db77db73299823266fc3e7c40818affe792d6eba)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
/* Private data for the "basic" global-to-local implementation: a dense
   lookup table covering the whole global index range [globalstart, globalend].
   Fast but memory proportional to the span of the indices. */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Private data for the "hash" global-to-local implementation: a hash map from
   global index to local index; memory proportional to n, hence scalable. */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 
19 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
20 {
21   PetscInt       numCells, step = 1;
22   PetscBool      isStride;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBeginHot;
26   *pStart = 0;
27   *points = NULL;
28   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
29   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
30   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
31   *pEnd   = *pStart + numCells;
32   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
33   PetscFunctionReturn(0);
34 }
35 
36 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
37 {
38   PetscInt       step = 1;
39   PetscBool      isStride;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBeginHot;
43   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
44   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
45   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
46   PetscFunctionReturn(0);
47 }
48 
49 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
50 {
51   PetscErrorCode ierr;
52 
53   PetscFunctionBeginHot;
54   if (points) {
55     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
56     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
57   } else {
58     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
59     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
60   }
61   PetscFunctionReturn(0);
62 }
63 
64 /* -----------------------------------------------------------------------------------------*/
65 
66 /*
67     Creates the global mapping information in the ISLocalToGlobalMapping structure
68 
69     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
70 */
71 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
72 {
73   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
74   PetscErrorCode ierr;
75 
76   PetscFunctionBegin;
77   if (mapping->data) PetscFunctionReturn(0);
78   end   = 0;
79   start = PETSC_MAX_INT;
80 
81   for (i=0; i<n; i++) {
82     if (idx[i] < 0) continue;
83     if (idx[i] < start) start = idx[i];
84     if (idx[i] > end)   end   = idx[i];
85   }
86   if (start > end) {start = 0; end = -1;}
87   mapping->globalstart = start;
88   mapping->globalend   = end;
89   if (!((PetscObject)mapping)->type_name) {
90     if ((end - start) > PetscMax(4*n,1000000)) {
91       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
92     } else {
93       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
94     }
95   }
96   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
97   PetscFunctionReturn(0);
98 }
99 
100 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
101 {
102   PetscErrorCode              ierr;
103   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
104   ISLocalToGlobalMapping_Basic *map;
105 
106   PetscFunctionBegin;
107   start            = mapping->globalstart;
108   end              = mapping->globalend;
109   ierr             = PetscNew(&map);CHKERRQ(ierr);
110   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
111   map->globals     = globals;
112   for (i=0; i<end-start+1; i++) globals[i] = -1;
113   for (i=0; i<n; i++) {
114     if (idx[i] < 0) continue;
115     globals[idx[i] - start] = i;
116   }
117   mapping->data = (void*)map;
118   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
123 {
124   PetscErrorCode              ierr;
125   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
126   ISLocalToGlobalMapping_Hash *map;
127 
128   PetscFunctionBegin;
129   ierr = PetscNew(&map);CHKERRQ(ierr);
130   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
131   for (i=0; i<n; i++ ) {
132     if (idx[i] < 0) continue;
133     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
134   }
135   mapping->data = (void*)map;
136   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
140 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
141 {
142   PetscErrorCode              ierr;
143   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
144 
145   PetscFunctionBegin;
146   if (!map) PetscFunctionReturn(0);
147   ierr = PetscFree(map->globals);CHKERRQ(ierr);
148   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
153 {
154   PetscErrorCode              ierr;
155   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
156 
157   PetscFunctionBegin;
158   if (!map) PetscFunctionReturn(0);
159   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
160   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
/*
   The GTOL* macros parameterize the template in isltog.h, which is included
   four times below to instantiate the global-to-local apply routines for the
   Basic (dense table) and Hash implementations, each in point-wise and
   block-wise form:
     GTOLTYPE - suffix of the private data struct the template casts to
     GTOLNAME - suffix of the generated function name
     GTOLBS   - block size the generated routine operates with
     GTOL     - statement mapping one global index g to a local index
   isltog.h undefines these macros after each inclusion.
*/
/* Basic, point-wise: dense-table lookup, then expand by block size */
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    if (local >= 0) local = bs*local + (g % bs); \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Basic, block-wise: direct dense-table lookup */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, point-wise: hash lookup, then expand by block size; a missed key
   leaves `local` untouched (template pre-sets it) — note PetscHMapIGet's
   return value is deliberately discarded here */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    if (local >= 0) local = bs*local + (g % bs);   \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash, block-wise: direct hash lookup */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
198 
199 /*@
200     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
201 
202     Not Collective
203 
204     Input Parameter:
205 .   ltog - local to global mapping
206 
207     Output Parameter:
208 .   nltog - the duplicated local to global mapping
209 
210     Level: advanced
211 
212 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
213 @*/
214 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
215 {
216   PetscErrorCode ierr;
217 
218   PetscFunctionBegin;
219   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
220   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
221   PetscFunctionReturn(0);
222 }
223 
224 /*@
225     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
226 
227     Not Collective
228 
229     Input Parameter:
230 .   ltog - local to global mapping
231 
232     Output Parameter:
233 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
234 
235     Level: advanced
236 
237 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
238 @*/
239 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
240 {
241   PetscFunctionBegin;
242   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
243   PetscValidIntPointer(n,2);
244   *n = mapping->bs*mapping->n;
245   PetscFunctionReturn(0);
246 }
247 
248 /*@C
249     ISLocalToGlobalMappingView - View a local to global mapping
250 
251     Not Collective
252 
253     Input Parameters:
254 +   ltog - local to global mapping
255 -   viewer - viewer
256 
257     Level: advanced
258 
259 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
260 @*/
261 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
262 {
263   PetscInt       i;
264   PetscMPIInt    rank;
265   PetscBool      iascii;
266   PetscErrorCode ierr;
267 
268   PetscFunctionBegin;
269   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
270   if (!viewer) {
271     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
272   }
273   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
274 
275   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
276   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
277   if (iascii) {
278     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
279     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
280     for (i=0; i<mapping->n; i++) {
281       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
282     }
283     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
284     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
285   }
286   PetscFunctionReturn(0);
287 }
288 
289 /*@
290     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
291     ordering and a global parallel ordering.
292 
293     Not collective
294 
295     Input Parameter:
296 .   is - index set containing the global numbers for each local number
297 
298     Output Parameter:
299 .   mapping - new mapping data structure
300 
301     Notes:
302     the block size of the IS determines the block size of the mapping
303     Level: advanced
304 
305 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
306 @*/
307 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
308 {
309   PetscErrorCode ierr;
310   PetscInt       n,bs;
311   const PetscInt *indices;
312   MPI_Comm       comm;
313   PetscBool      isblock;
314 
315   PetscFunctionBegin;
316   PetscValidHeaderSpecific(is,IS_CLASSID,1);
317   PetscValidPointer(mapping,2);
318 
319   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
320   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
321   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
322   if (!isblock) {
323     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
324     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
325     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
326   } else {
327     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
328     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
329     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
330     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
331   }
332   PetscFunctionReturn(0);
333 }
334 
335 /*@C
336     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
337     ordering and a global parallel ordering.
338 
339     Collective
340 
341     Input Parameter:
342 +   sf - star forest mapping contiguous local indices to (rank, offset)
343 -   start - first global index on this process
344 
345     Output Parameter:
346 .   mapping - new mapping data structure
347 
348     Level: advanced
349 
350 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
351 @*/
352 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
353 {
354   PetscErrorCode ierr;
355   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
356   const PetscInt *ilocal;
357   MPI_Comm       comm;
358 
359   PetscFunctionBegin;
360   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
361   PetscValidPointer(mapping,3);
362 
363   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
364   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
365   if (ilocal) {
366     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
367   }
368   else maxlocal = nleaves;
369   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
370   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
371   for (i=0; i<nroots; i++) globals[i] = start + i;
372   for (i=0; i<maxlocal; i++) ltog[i] = -1;
373   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
374   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
375   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
376   ierr = PetscFree(globals);CHKERRQ(ierr);
377   PetscFunctionReturn(0);
378 }
379 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
+   mapping - mapping data structure
-   bs - the blocksize

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;              /* new block indices */
  const PetscInt *oid;              /* old (expanded) indices */
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;                 /* old number of block indices */
  obs = mapping->bs;                /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;                /* new number of block indices */
  /* the total number of entries on*obs must be divisible by the new bs */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* GetIndices returns the indices expanded to block size 1 */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* each new block must be bs consecutive global indices, or entirely
       negative (unmapped); cn counts negative entries in the block */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* after the loop j == bs-1: this tests the last entry of the block
       (and the only entry when bs == 1, where the loop body never runs) */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* either the whole block is unmapped or none of it may be */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      /* first entry of a consecutive block, converted to a block index */
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* drop the type-specific global-to-local data; it will be rebuilt lazily */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
454 
455 /*@
456     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
457     ordering and a global parallel ordering.
458 
459     Not Collective
460 
461     Input Parameters:
462 .   mapping - mapping data structure
463 
464     Output Parameter:
465 .   bs - the blocksize
466 
467     Level: advanced
468 
469 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
470 @*/
471 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
472 {
473   PetscFunctionBegin;
474   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
475   *bs = mapping->bs;
476   PetscFunctionReturn(0);
477 }
478 
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes:
    There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
          ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no entries */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;   /* number of block indices */
  (*mapping)->bs            = bs;
  /* cached neighbor/ghost information; computed lazily elsewhere */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  (*mapping)->info_nodec    = NULL;
  (*mapping)->info_nodei    = NULL;

  /* type-specific ops are installed by ISLocalToGlobalMappingSetType() */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscArraycpy(in,indices,n);CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* take ownership of the caller's array; it will be freed on destroy */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
546 
547 PetscFunctionList ISLocalToGlobalMappingList = NULL;
548 
549 
550 /*@
551    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
552 
553    Not collective
554 
555    Input Parameters:
556 .  mapping - mapping data structure
557 
558    Level: advanced
559 
560 @*/
561 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
562 {
563   PetscErrorCode             ierr;
564   char                       type[256];
565   ISLocalToGlobalMappingType defaulttype = "Not set";
566   PetscBool                  flg;
567 
568   PetscFunctionBegin;
569   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
570   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
571   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
572   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
573   if (flg) {
574     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
575   }
576   ierr = PetscOptionsEnd();CHKERRQ(ierr);
577   PetscFunctionReturn(0);
578 }
579 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Note Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* entry 0 is freed outside the loop, entries 1..info_nproc-1 inside;
       NOTE(review): this matches the allocation pattern used when the info
       arrays are built -- confirm against ISLocalToGlobalMappingGetInfo */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* info_nodei[0] appears to own the single buffer backing the node lists --
     only that first entry is freed here */
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2((*mapping)->info_nodec,(*mapping)->info_nodei);CHKERRQ(ierr);
  /* let the type-specific implementation release its private data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
624 
625 /*@
626     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
627     a new index set using the global numbering defined in an ISLocalToGlobalMapping
628     context.
629 
630     Collective on is
631 
632     Input Parameters:
633 +   mapping - mapping between local and global numbering
634 -   is - index set in local numbering
635 
636     Output Parameters:
637 .   newis - index set in global numbering
638 
639     Notes:
640     The output IS will have the same communicator of the input IS.
641 
642     Level: advanced
643 
644 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
645           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
646 @*/
647 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
648 {
649   PetscErrorCode ierr;
650   PetscInt       n,*idxout;
651   const PetscInt *idxin;
652 
653   PetscFunctionBegin;
654   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
655   PetscValidHeaderSpecific(is,IS_CLASSID,2);
656   PetscValidPointer(newis,3);
657 
658   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
659   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
660   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
661   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
662   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
663   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
664   PetscFunctionReturn(0);
665 }
666 
667 /*@
668    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
669    and converts them to the global numbering.
670 
671    Not collective
672 
673    Input Parameters:
674 +  mapping - the local to global mapping context
675 .  N - number of integers
676 -  in - input indices in local numbering
677 
678    Output Parameter:
679 .  out - indices in global numbering
680 
681    Notes:
682    The in and out array parameters may be identical.
683 
684    Level: advanced
685 
686 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
687           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
688           AOPetscToApplication(), ISGlobalToLocalMappingApply()
689 
690 @*/
691 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
692 {
693   PetscInt i,bs,Nmax;
694 
695   PetscFunctionBegin;
696   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
697   bs   = mapping->bs;
698   Nmax = bs*mapping->n;
699   if (bs == 1) {
700     const PetscInt *idx = mapping->indices;
701     for (i=0; i<N; i++) {
702       if (in[i] < 0) {
703         out[i] = in[i];
704         continue;
705       }
706       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
707       out[i] = idx[in[i]];
708     }
709   } else {
710     const PetscInt *idx = mapping->indices;
711     for (i=0; i<N; i++) {
712       if (in[i] < 0) {
713         out[i] = in[i];
714         continue;
715       }
716       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
717       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
718     }
719   }
720   PetscFunctionReturn(0);
721 }
722 
723 /*@
724    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
725 
726    Not collective
727 
728    Input Parameters:
729 +  mapping - the local to global mapping context
730 .  N - number of integers
731 -  in - input indices in local block numbering
732 
733    Output Parameter:
734 .  out - indices in global block numbering
735 
736    Notes:
737    The in and out array parameters may be identical.
738 
739    Example:
740      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
741      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
742 
743    Level: advanced
744 
745 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
746           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
747           AOPetscToApplication(), ISGlobalToLocalMappingApply()
748 
749 @*/
750 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
751 {
752 
753   PetscFunctionBegin;
754   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
755   {
756     PetscInt i,Nmax = mapping->n;
757     const PetscInt *idx = mapping->indices;
758 
759     for (i=0; i<N; i++) {
760       if (in[i] < 0) {
761         out[i] = in[i];
762         continue;
763       }
764       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
765       out[i] = idx[in[i]];
766     }
767   }
768   PetscFunctionReturn(0);
769 }
770 
771 /*@
772     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
773     specified with a global numbering.
774 
775     Not collective
776 
777     Input Parameters:
778 +   mapping - mapping between local and global numbering
779 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
780            IS_GTOLM_DROP - drops the indices with no local value from the output list
781 .   n - number of global indices to map
782 -   idx - global indices to map
783 
784     Output Parameters:
785 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
786 -   idxout - local index of each global index, one must pass in an array long enough
787              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
788              idxout == NULL to determine the required length (returned in nout)
789              and then allocate the required space and call ISGlobalToLocalMappingApply()
790              a second time to set the values.
791 
792     Notes:
793     Either nout or idxout may be NULL. idx and idxout may be identical.
794 
795     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
796     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
797     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
798 
799     Level: advanced
800 
801     Developer Note: The manual page states that idx and idxout may be identical but the calling
802        sequence declares idx as const so it cannot be the same as idxout.
803 
804 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
805           ISLocalToGlobalMappingDestroy()
806 @*/
807 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
808 {
809   PetscErrorCode ierr;
810 
811   PetscFunctionBegin;
812   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
813   if (!mapping->data) {
814     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
815   }
816   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
817   PetscFunctionReturn(0);
818 }
819 
820 /*@
821     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
822     a new index set using the local numbering defined in an ISLocalToGlobalMapping
823     context.
824 
825     Not collective
826 
827     Input Parameters:
828 +   mapping - mapping between local and global numbering
829 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
830            IS_GTOLM_DROP - drops the indices with no local value from the output list
831 -   is - index set in global numbering
832 
833     Output Parameters:
834 .   newis - index set in local numbering
835 
836     Notes:
837     The output IS will be sequential, as it encodes a purely local operation
838 
839     Level: advanced
840 
841 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
842           ISLocalToGlobalMappingDestroy()
843 @*/
844 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
845 {
846   PetscErrorCode ierr;
847   PetscInt       n,nout,*idxout;
848   const PetscInt *idxin;
849 
850   PetscFunctionBegin;
851   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
852   PetscValidHeaderSpecific(is,IS_CLASSID,3);
853   PetscValidPointer(newis,4);
854 
855   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
856   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
857   if (type == IS_GTOLM_MASK) {
858     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
859   } else {
860     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
861     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
862   }
863   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
864   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
865   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
866   PetscFunctionReturn(0);
867 }
868 
869 /*@
870     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
871     specified with a block global numbering.
872 
873     Not collective
874 
875     Input Parameters:
876 +   mapping - mapping between local and global numbering
877 .   type - IS_GTOLM_MASK - maps global indices with no local value to -1 in the output list (i.e., mask them)
878            IS_GTOLM_DROP - drops the indices with no local value from the output list
879 .   n - number of global indices to map
880 -   idx - global indices to map
881 
882     Output Parameters:
883 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
884 -   idxout - local index of each global index, one must pass in an array long enough
885              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
886              idxout == NULL to determine the required length (returned in nout)
887              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
888              a second time to set the values.
889 
890     Notes:
891     Either nout or idxout may be NULL. idx and idxout may be identical.
892 
893     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
894     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
895     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
896 
897     Level: advanced
898 
899     Developer Note: The manual page states that idx and idxout may be identical but the calling
900        sequence declares idx as const so it cannot be the same as idxout.
901 
902 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
903           ISLocalToGlobalMappingDestroy()
904 @*/
905 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
906                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
907 {
908   PetscErrorCode ierr;
909 
910   PetscFunctionBegin;
911   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
912   if (!mapping->data) {
913     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
914   }
915   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
916   PetscFunctionReturn(0);
917 }
918 
919 
920 /*@C
921     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
922      each index shared by more than one processor
923 
924     Collective on ISLocalToGlobalMapping
925 
926     Input Parameters:
927 .   mapping - the mapping from local to global indexing
928 
    Output Parameters:
930 +   nproc - number of processors that are connected to this one
931 .   proc - neighboring processors
932 .   numproc - number of indices for each subdomain (processor)
933 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
934 
935     Level: advanced
936 
937     Fortran Usage:
938 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
939 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
940           PetscInt indices[nproc][numprocmax],ierr)
941         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
942         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
943 
944 
945 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
946           ISLocalToGlobalMappingRestoreInfo()
947 @*/
948 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
949 {
950   PetscErrorCode ierr;
951 
952   PetscFunctionBegin;
953   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
954   if (mapping->info_cached) {
955     *nproc    = mapping->info_nproc;
956     *procs    = mapping->info_procs;
957     *numprocs = mapping->info_numprocs;
958     *indices  = mapping->info_indices;
959   } else {
960     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
961   }
962   PetscFunctionReturn(0);
963 }
964 
/*
   ISLocalToGlobalMappingGetBlockInfo_Private - computes, for each neighboring process,
   the list of local (block) indices shared with that process.

   Algorithm (rendezvous): the global index space [0,Ng) is block-partitioned across
   the processes ("global owners").  Round 1: every process sends each of its local
   indices to that index's global owner.  The owner counts how many processes
   reference each index; indices referenced by more than one process are the
   "nontrivial" (shared) ones.  Rounds 2/3: the owner sends back, to every
   referencing process, the full list of processes sharing each nontrivial index.
   Finally the received lists are regrouped per neighboring process, sorted by
   global numbering, and cached on the mapping.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: nothing is shared; return empty (but allocated) arrays */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* three rounds of point-to-point communication; each round gets its own tag */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = global size of the index space = 1 + largest global index referenced anywhere */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* block partition of [0,Ng): each rank globally owns [rstart,rstart+ng); last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = number of my local indices globally owned by rank p; nprocs[2*p+1] = 0/1 flag */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscArrayzero(nprocs,2*size);CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  /* round 1 messages are pairs (global index, sender-local position), hence the factor 2 */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* the packing loop advanced starts[]; rebuild the offsets for the sends below */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  /* nownedsenders[i] = number of processes that reference globally owned index rstart+i */
  ierr = PetscCalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* the fill loop above advanced starts[]; recompute before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  /* reply to every process that sent us something in round 1 */
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscArraycpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]);CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  /* round 2: one reply expected from each global owner we contacted in round 1 */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  /* round 3: receive the actual sharing lists, now that their lengths are known */
  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here: nprocs[p] = number of my nontrivial local nodes shared with rank p */
  ierr = PetscCalloc1(size,&nprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs maps a rank to its (compressed) position in the output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  /* (*numprocs) is zeroed and re-used as a running fill position per neighbor */
  ierr = PetscArrayzero(*numprocs,nt);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1372 
1373 /*@C
1374     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1375 
1376     Collective on ISLocalToGlobalMapping
1377 
1378     Input Parameters:
1379 .   mapping - the mapping from local to global indexing
1380 
    Output Parameters:
1382 +   nproc - number of processors that are connected to this one
1383 .   proc - neighboring processors
1384 .   numproc - number of indices for each processor
1385 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1386 
1387     Level: advanced
1388 
1389 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1390           ISLocalToGlobalMappingGetInfo()
1391 @*/
1392 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1393 {
1394   PetscErrorCode ierr;
1395 
1396   PetscFunctionBegin;
1397   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1398   if (mapping->info_free) {
1399     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1400     if (*indices) {
1401       PetscInt i;
1402 
1403       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1404       for (i=1; i<*nproc; i++) {
1405         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1406       }
1407       ierr = PetscFree(*indices);CHKERRQ(ierr);
1408     }
1409   }
1410   *nproc    = 0;
1411   *procs    = NULL;
1412   *numprocs = NULL;
1413   *indices  = NULL;
1414   PetscFunctionReturn(0);
1415 }
1416 
1417 /*@C
1418     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1419      each index shared by more than one processor
1420 
1421     Collective on ISLocalToGlobalMapping
1422 
1423     Input Parameters:
1424 .   mapping - the mapping from local to global indexing
1425 
    Output Parameters:
1427 +   nproc - number of processors that are connected to this one
1428 .   proc - neighboring processors
1429 .   numproc - number of indices for each subdomain (processor)
1430 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1431 
1432     Level: advanced
1433 
1434     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1435 
1436     Fortran Usage:
1437 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1438 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1439           PetscInt indices[nproc][numprocmax],ierr)
1440         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1441         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1442 
1443 
1444 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1445           ISLocalToGlobalMappingRestoreInfo()
1446 @*/
1447 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1448 {
1449   PetscErrorCode ierr;
1450   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1451 
1452   PetscFunctionBegin;
1453   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1454   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1455   if (bs > 1) { /* we need to expand the cached info */
1456     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1457     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1458     for (i=0; i<*nproc; i++) {
1459       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1460       for (j=0; j<bnumprocs[i]; j++) {
1461         for (k=0; k<bs; k++) {
1462           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1463         }
1464       }
1465       (*numprocs)[i] = bnumprocs[i]*bs;
1466     }
1467     mapping->info_free = PETSC_TRUE;
1468   } else {
1469     *numprocs = bnumprocs;
1470     *indices  = bindices;
1471   }
1472   PetscFunctionReturn(0);
1473 }
1474 
1475 /*@C
1476     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1477 
1478     Collective on ISLocalToGlobalMapping
1479 
1480     Input Parameters:
1481 .   mapping - the mapping from local to global indexing
1482 
    Output Parameters:
1484 +   nproc - number of processors that are connected to this one
1485 .   proc - neighboring processors
1486 .   numproc - number of indices for each processor
1487 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1488 
1489     Level: advanced
1490 
1491 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1492           ISLocalToGlobalMappingGetInfo()
1493 @*/
1494 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1495 {
1496   PetscErrorCode ierr;
1497 
1498   PetscFunctionBegin;
1499   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1500   PetscFunctionReturn(0);
1501 }
1502 
1503 /*@C
1504     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1505 
1506     Collective on ISLocalToGlobalMapping
1507 
1508     Input Parameters:
1509 .   mapping - the mapping from local to global indexing
1510 
    Output Parameters:
1512 +   nnodes - number of local nodes (same ISLocalToGlobalMappingGetSize())
1513 .   count - number of neighboring processors per node
1514 -   indices - indices of processes sharing the node (sorted)
1515 
1516     Level: advanced
1517 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1519 
1520 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1521           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1522 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* info_nodec has n+1 entries (per-node counts plus a sentinel); info_nodei[i]
       are pointers into one contiguous buffer attached to info_nodei[0] (CSR-style) */
    ierr = PetscMalloc2(n+1,&mapping->info_nodec,n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count neighbors per node; every node is shared with at least this rank,
       entry 0 of the neighbor info is skipped because it describes this rank itself */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1;}
    m = n;
    mapping->info_nodec[n] = 0;
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* allocate one buffer of total length m and set the per-node offsets from the counts */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* second pass: reset counts and fill the lists, starting each with neigh[0] (this rank).
       NOTE(review): if n_neigh == 0 while n > 0 (neigh == NULL), neigh[0] below would be a
       NULL dereference -- confirm GetInfo always reports this rank as its own neighbor */
    ierr = PetscArrayzero(mapping->info_nodec,n);CHKERRQ(ierr);
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* a node may be listed with the same neighbor more than once; sort and deduplicate */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* hand out the cached arrays; ownership stays with the mapping */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1567 
/*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Notes:
    The arrays themselves remain cached on the mapping and are not freed here; this routine
    only invalidates the caller's references obtained from ISLocalToGlobalMappingGetNodeInfo().

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* zero out the caller's copies; the cached info_nodec/info_nodei arrays stay with the mapping */
  if (nnodes)  *nnodes  = 0;
  if (count)   *count   = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}
1595 
1596 /*@C
1597    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1598 
1599    Not Collective
1600 
1601    Input Arguments:
1602 . ltog - local to global mapping
1603 
1604    Output Arguments:
1605 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1606 
1607    Level: advanced
1608 
1609    Notes:
1610     ISLocalToGlobalMappingGetSize() returns the length the this array
1611 
1612 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1613 @*/
1614 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1615 {
1616   PetscFunctionBegin;
1617   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1618   PetscValidPointer(array,2);
1619   if (ltog->bs == 1) {
1620     *array = ltog->indices;
1621   } else {
1622     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1623     const PetscInt *ii;
1624     PetscErrorCode ierr;
1625 
1626     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1627     *array = jj;
1628     k    = 0;
1629     ii   = ltog->indices;
1630     for (i=0; i<n; i++)
1631       for (j=0; j<bs; j++)
1632         jj[k++] = bs*ii[i] + j;
1633   }
1634   PetscFunctionReturn(0);
1635 }
1636 
1637 /*@C
1638    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1639 
1640    Not Collective
1641 
1642    Input Arguments:
1643 + ltog - local to global mapping
1644 - array - array of indices
1645 
1646    Level: advanced
1647 
1648 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1649 @*/
1650 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1651 {
1652   PetscFunctionBegin;
1653   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1654   PetscValidPointer(array,2);
1655   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1656 
1657   if (ltog->bs > 1) {
1658     PetscErrorCode ierr;
1659     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1660   }
1661   PetscFunctionReturn(0);
1662 }
1663 
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices

   Notes:
   The returned pointer aliases the mapping's internal storage; do not free it, and return it
   with ISLocalToGlobalMappingRestoreBlockIndices() when no longer needed.

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* no copy: hand out the mapping's own block-index array */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1687 
/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+ ltog - local to global mapping
- array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get routine handed out the internal array, so anything else is a caller error */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* nothing to free (the array is owned by the mapping); just invalidate the reference */
  *array = NULL;
  PetscFunctionReturn(0);
}
1710 
1711 /*@C
1712    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1713 
1714    Not Collective
1715 
1716    Input Arguments:
1717 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1718 . n - number of mappings to concatenate
1719 - ltogs - local to global mappings
1720 
1721    Output Arguments:
1722 . ltogcat - new mapping
1723 
1724    Note: this currently always returns a mapping with block size of 1
1725 
1726    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1727 
1728    Level: advanced
1729 
1730 .seealso: ISLocalToGlobalMappingCreate()
1731 @*/
1732 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1733 {
1734   PetscInt       i,cnt,m,*idx;
1735   PetscErrorCode ierr;
1736 
1737   PetscFunctionBegin;
1738   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1739   if (n > 0) PetscValidPointer(ltogs,3);
1740   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1741   PetscValidPointer(ltogcat,4);
1742   for (cnt=0,i=0; i<n; i++) {
1743     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1744     cnt += m;
1745   }
1746   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1747   for (cnt=0,i=0; i<n; i++) {
1748     const PetscInt *subidx;
1749     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1750     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1751     ierr = PetscArraycpy(&idx[cnt],subidx,m);CHKERRQ(ierr);
1752     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1753     cnt += m;
1754   }
1755   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1756   PetscFunctionReturn(0);
1757 }
1758 
1759 /*MC
1760       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1761                                     used this is good for only small and moderate size problems.
1762 
1763    Options Database Keys:
1764 .   -islocaltoglobalmapping_type basic - select this method
1765 
1766    Level: beginner
1767 
1768 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1769 M*/
1770 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1771 {
1772   PetscFunctionBegin;
1773   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1774   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1775   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1776   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1777   PetscFunctionReturn(0);
1778 }
1779 
/*MC
      ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
                                    used this is good for large memory problems.

   Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method

   Notes:
    This is selected automatically for large problems if the user does not set the type.

   Level: beginner

.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  /* install the hash-table implementations of the global-to-local operations */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}
1804 
1805 
/*@C
    ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

   Not Collective

   Input Parameters:
+  sname - name of a new method
-  function - routine to create method context

   Notes:
   ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

   Sample usage:
.vb
   ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

   Then, your mapping can be chosen with the procedural interface via
$     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
   or at runtime via the option
$     -islocaltoglobalmapping_type my_mapper

   Level: advanced

.seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

@*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the IS package (and its function list) exists before adding to it */
  ierr = ISInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1842 
1843 /*@C
1844    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1845 
1846    Logically Collective on ISLocalToGlobalMapping
1847 
1848    Input Parameters:
1849 +  ltog - the ISLocalToGlobalMapping object
1850 -  type - a known method
1851 
1852    Options Database Key:
1853 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1854     of available methods (for instance, basic or hash)
1855 
1856    Notes:
1857    See "petsc/include/petscis.h" for available methods
1858 
1859   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1860   then set the ISLocalToGlobalMapping type from the options database rather than by using
1861   this routine.
1862 
1863   Level: intermediate
1864 
1865   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1866   are accessed by ISLocalToGlobalMappingSetType().
1867 
1868 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1869 
1870 @*/
1871 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1872 {
1873   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1874   PetscBool      match;
1875 
1876   PetscFunctionBegin;
1877   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1878   PetscValidCharPointer(type,2);
1879 
1880   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1881   if (match) PetscFunctionReturn(0);
1882 
1883   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1884   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1885   /* Destroy the previous private LTOG context */
1886   if (ltog->ops->destroy) {
1887     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1888     ltog->ops->destroy = NULL;
1889   }
1890   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1891   ierr = (*r)(ltog);CHKERRQ(ierr);
1892   PetscFunctionReturn(0);
1893 }
1894 
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

/*@C
  ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

  Not Collective

  Level: advanced

.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
@*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* guard so the built-in types are registered exactly once */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1918 
1919