xref: /petsc/src/vec/is/utils/isltog.c (revision 1d36bdfd46c3877894678bf6fced88766689f9b6)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
7 PetscClassId IS_LTOGM_CLASSID;
8 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
9 
10 typedef struct {
11   PetscInt *globals;
12 } ISLocalToGlobalMapping_Basic;
13 
14 typedef struct {
15   PetscHMapI globalht;
16 } ISLocalToGlobalMapping_Hash;
17 
18 
19 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
20 {
21   PetscInt       numCells, step = 1;
22   PetscBool      isStride;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBeginHot;
26   *pStart = 0;
27   *points = NULL;
28   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
29   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
30   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
31   *pEnd   = *pStart + numCells;
32   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
33   PetscFunctionReturn(0);
34 }
35 
36 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
37 {
38   PetscInt       step = 1;
39   PetscBool      isStride;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBeginHot;
43   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
44   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
45   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
46   PetscFunctionReturn(0);
47 }
48 
49 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
50 {
51   PetscErrorCode ierr;
52 
53   PetscFunctionBeginHot;
54   if (points) {
55     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
56     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
57   } else {
58     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
59     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
60   }
61   PetscFunctionReturn(0);
62 }
63 
64 /* -----------------------------------------------------------------------------------------*/
65 
66 /*
67     Creates the global mapping information in the ISLocalToGlobalMapping structure
68 
69     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
70 */
71 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
72 {
73   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
74   PetscErrorCode ierr;
75 
76   PetscFunctionBegin;
77   if (mapping->data) PetscFunctionReturn(0);
78   end   = 0;
79   start = PETSC_MAX_INT;
80 
81   for (i=0; i<n; i++) {
82     if (idx[i] < 0) continue;
83     if (idx[i] < start) start = idx[i];
84     if (idx[i] > end)   end   = idx[i];
85   }
86   if (start > end) {start = 0; end = -1;}
87   mapping->globalstart = start;
88   mapping->globalend   = end;
89   if (!((PetscObject)mapping)->type_name) {
90     if ((end - start) > PetscMax(4*n,1000000)) {
91       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
92     } else {
93       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
94     }
95   }
96   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
97   PetscFunctionReturn(0);
98 }
99 
100 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
101 {
102   PetscErrorCode              ierr;
103   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
104   ISLocalToGlobalMapping_Basic *map;
105 
106   PetscFunctionBegin;
107   start            = mapping->globalstart;
108   end              = mapping->globalend;
109   ierr             = PetscNew(&map);CHKERRQ(ierr);
110   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
111   map->globals     = globals;
112   for (i=0; i<end-start+1; i++) globals[i] = -1;
113   for (i=0; i<n; i++) {
114     if (idx[i] < 0) continue;
115     globals[idx[i] - start] = i;
116   }
117   mapping->data = (void*)map;
118   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
123 {
124   PetscErrorCode              ierr;
125   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
126   ISLocalToGlobalMapping_Hash *map;
127 
128   PetscFunctionBegin;
129   ierr = PetscNew(&map);CHKERRQ(ierr);
130   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
131   for (i=0; i<n; i++ ) {
132     if (idx[i] < 0) continue;
133     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
134   }
135   mapping->data = (void*)map;
136   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
/* Free the dense global-to-local lookup table created by ISGlobalToLocalMappingSetUp_Basic() */
static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;

  PetscFunctionBegin;
  /* Nothing to do if the global-to-local data was never set up */
  if (!map) PetscFunctionReturn(0);
  ierr = PetscFree(map->globals);CHKERRQ(ierr);
  /* PetscFree() also zeros mapping->data, so a later setup starts clean */
  ierr = PetscFree(mapping->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
151 
/* Free the hash table created by ISGlobalToLocalMappingSetUp_Hash() */
static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode              ierr;
  ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;

  PetscFunctionBegin;
  /* Nothing to do if the global-to-local data was never set up */
  if (!map) PetscFunctionReturn(0);
  ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
  /* PetscFree() also zeros mapping->data, so a later setup starts clean */
  ierr = PetscFree(mapping->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
163 
164 #define GTOLTYPE _Basic
165 #define GTOLNAME _Basic
166 #define GTOLBS mapping->bs
167 #define GTOL(g, local) do {                  \
168     local = map->globals[g/bs - start];      \
169     local = bs*local + (g % bs);             \
170   } while (0)
171 
172 #include <../src/vec/is/utils/isltog.h>
173 
174 #define GTOLTYPE _Basic
175 #define GTOLNAME Block_Basic
176 #define GTOLBS 1
177 #define GTOL(g, local) do {                  \
178     local = map->globals[g - start];         \
179   } while (0)
180 #include <../src/vec/is/utils/isltog.h>
181 
182 #define GTOLTYPE _Hash
183 #define GTOLNAME _Hash
184 #define GTOLBS mapping->bs
185 #define GTOL(g, local) do {                         \
186     (void)PetscHMapIGet(map->globalht,g/bs,&local); \
187     local = bs*local + (g % bs);                    \
188    } while (0)
189 #include <../src/vec/is/utils/isltog.h>
190 
191 #define GTOLTYPE _Hash
192 #define GTOLNAME Block_Hash
193 #define GTOLBS 1
194 #define GTOL(g, local) do {                         \
195     (void)PetscHMapIGet(map->globalht,g,&local);    \
196   } while (0)
197 #include <../src/vec/is/utils/isltog.h>
198 
199 /*@
200     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
201 
202     Not Collective
203 
204     Input Parameter:
205 .   ltog - local to global mapping
206 
207     Output Parameter:
208 .   nltog - the duplicated local to global mapping
209 
210     Level: advanced
211 
212     Concepts: mapping^local to global
213 
214 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
215 @*/
PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  /* Deep-copies only the communicator, block size, length, and indices; the type
     and any cached global-to-local data are not carried over — NOTE(review):
     presumably rebuilt lazily on first use; confirm for type-sensitive callers. */
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
225 
226 /*@
227     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
228 
229     Not Collective
230 
231     Input Parameter:
232 .   ltog - local to global mapping
233 
234     Output Parameter:
235 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
236 
237     Level: advanced
238 
239     Concepts: mapping^local to global
240 
241 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
242 @*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks; the unblocked size is blocks times block size */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
251 
252 /*@C
253     ISLocalToGlobalMappingView - View a local to global mapping
254 
255     Not Collective
256 
257     Input Parameters:
258 +   ltog - local to global mapping
259 -   viewer - viewer
260 
261     Level: advanced
262 
263     Concepts: mapping^local to global
264 
265 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
266 @*/
267 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
268 {
269   PetscInt       i;
270   PetscMPIInt    rank;
271   PetscBool      iascii;
272   PetscErrorCode ierr;
273 
274   PetscFunctionBegin;
275   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
276   if (!viewer) {
277     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
278   }
279   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
280 
281   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
282   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
283   if (iascii) {
284     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
285     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
286     for (i=0; i<mapping->n; i++) {
287       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
288     }
289     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
290     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
291   }
292   PetscFunctionReturn(0);
293 }
294 
295 /*@
296     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
297     ordering and a global parallel ordering.
298 
299     Not collective
300 
301     Input Parameter:
302 .   is - index set containing the global numbers for each local number
303 
304     Output Parameter:
305 .   mapping - new mapping data structure
306 
307     Notes:
308     the block size of the IS determines the block size of the mapping
309     Level: advanced
310 
311     Concepts: mapping^local to global
312 
313 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
314 @*/
315 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
316 {
317   PetscErrorCode ierr;
318   PetscInt       n,bs;
319   const PetscInt *indices;
320   MPI_Comm       comm;
321   PetscBool      isblock;
322 
323   PetscFunctionBegin;
324   PetscValidHeaderSpecific(is,IS_CLASSID,1);
325   PetscValidPointer(mapping,2);
326 
327   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
328   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
329   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
330   if (!isblock) {
331     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
332     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
333     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
334   } else {
335     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
336     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
337     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
338     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
339   }
340   PetscFunctionReturn(0);
341 }
342 
343 /*@C
344     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
345     ordering and a global parallel ordering.
346 
347     Collective
348 
349     Input Parameter:
350 +   sf - star forest mapping contiguous local indices to (rank, offset)
351 -   start - first global index on this process
352 
353     Output Parameter:
354 .   mapping - new mapping data structure
355 
356     Level: advanced
357 
358     Concepts: mapping^local to global
359 
360 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
361 @*/
362 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
363 {
364   PetscErrorCode ierr;
365   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
366   const PetscInt *ilocal;
367   MPI_Comm       comm;
368 
369   PetscFunctionBegin;
370   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
371   PetscValidPointer(mapping,3);
372 
373   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
374   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
375   if (ilocal) {
376     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
377   }
378   else maxlocal = nleaves;
379   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
380   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
381   for (i=0; i<nroots; i++) globals[i] = start + i;
382   for (i=0; i<maxlocal; i++) ltog[i] = -1;
383   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
384   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
385   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
386   ierr = PetscFree(globals);CHKERRQ(ierr);
387   PetscFunctionReturn(0);
388 }
389 
390 /*@
391     ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping
392 
393     Not collective
394 
395     Input Parameters:
396 +   mapping - mapping data structure
397 -   bs - the blocksize
398 
399     Level: advanced
400 
401     Concepts: mapping^local to global
402 
403 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
404 @*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  /* on/obs: old block count and block size; nn: new block count.
     The total unblocked length on*obs must be divisible by the new block size. */
  on  = mapping->n;
  obs = mapping->bs;
  oid = mapping->indices;
  nn  = (on*obs)/bs;
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* GetIndices yields the expanded (unblocked) index array */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Within each new block, indices must be consecutive; cn counts negative
       ("masked") entries, which must be all-or-none per block */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 here: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1; /* fully masked block */
    } else {
      nid[i] = oid[i*bs]/bs; /* new block index is the first entry divided by bs */
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    /* info_indices[0] holds the shared allocation for the first entry; the rest
       were allocated individually */
    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard any type-specific global-to-local data; it will be rebuilt lazily */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
466 
467 /*@
467     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
470 
471     Not Collective
472 
473     Input Parameters:
474 .   mapping - mapping data structure
475 
476     Output Parameter:
477 .   bs - the blocksize
478 
479     Level: advanced
480 
481     Concepts: mapping^local to global
482 
483 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
484 @*/
485 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
486 {
487   PetscFunctionBegin;
488   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
489   *bs = mapping->bs;
490   PetscFunctionReturn(0);
491 }
492 
493 /*@
494     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
495     ordering and a global parallel ordering.
496 
497     Not Collective, but communicator may have more than one process
498 
499     Input Parameters:
500 +   comm - MPI communicator
501 .   bs - the block size
502 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
503 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
504 -   mode - see PetscCopyMode
505 
506     Output Parameter:
507 .   mapping - new mapping data structure
508 
509     Notes:
510     There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
511 
512     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
513     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
514     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
515 
516     Level: advanced
517 
518     Concepts: mapping^local to global
519 
520 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
521           ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
522 @*/
523 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
524 {
525   PetscErrorCode ierr;
526   PetscInt       *in;
527 
528   PetscFunctionBegin;
529   if (n) PetscValidIntPointer(indices,3);
530   PetscValidPointer(mapping,4);
531 
532   *mapping = NULL;
533   ierr = ISInitializePackage();CHKERRQ(ierr);
534 
535   ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
536                            comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
537   (*mapping)->n             = n;
538   (*mapping)->bs            = bs;
539   (*mapping)->info_cached   = PETSC_FALSE;
540   (*mapping)->info_free     = PETSC_FALSE;
541   (*mapping)->info_procs    = NULL;
542   (*mapping)->info_numprocs = NULL;
543   (*mapping)->info_indices  = NULL;
544   (*mapping)->info_nodec    = NULL;
545   (*mapping)->info_nodei    = NULL;
546 
547   (*mapping)->ops->globaltolocalmappingapply      = NULL;
548   (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
549   (*mapping)->ops->destroy                        = NULL;
550   if (mode == PETSC_COPY_VALUES) {
551     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
552     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
553     (*mapping)->indices = in;
554     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
555   } else if (mode == PETSC_OWN_POINTER) {
556     (*mapping)->indices = (PetscInt*)indices;
557     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
558   }
559   else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
560   PetscFunctionReturn(0);
561 }
562 
563 PetscFunctionList ISLocalToGlobalMappingList = NULL;
564 
565 
566 /*@
567    ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.
568 
569    Not collective
570 
571    Input Parameters:
572 .  mapping - mapping data structure
573 
574    Level: advanced
575 
576 @*/
577 PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
578 {
579   PetscErrorCode             ierr;
580   char                       type[256];
581   ISLocalToGlobalMappingType defaulttype = "Not set";
582   PetscBool                  flg;
583 
584   PetscFunctionBegin;
585   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
586   ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
587   ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
588   ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
589   if (flg) {
590     ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
591   }
592   ierr = PetscOptionsEnd();CHKERRQ(ierr);
593   PetscFunctionReturn(0);
594 }
595 
596 /*@
597    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
598    ordering and a global parallel ordering.
599 
600    Not Collective
601 
602    Input Parameters:
603 .  mapping - mapping data structure
604 
605    Level: advanced
606 
607 .seealso: ISLocalToGlobalMappingCreate()
608 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* Entry 0 is freed first, then each remaining per-process array, then the
       array of pointers itself — mirrors how the cache was allocated */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  /* info_nodei uses a single shared allocation anchored at element 0 */
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*mapping)->info_nodei);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_nodec);CHKERRQ(ierr);
  /* Let the type-specific implementation release its global-to-local data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
641 
642 /*@
643     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
644     a new index set using the global numbering defined in an ISLocalToGlobalMapping
645     context.
646 
647     Not collective
648 
649     Input Parameters:
650 +   mapping - mapping between local and global numbering
651 -   is - index set in local numbering
652 
653     Output Parameters:
654 .   newis - index set in global numbering
655 
656     Level: advanced
657 
658     Concepts: mapping^local to global
659 
660 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
661           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
662 @*/
663 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
664 {
665   PetscErrorCode ierr;
666   PetscInt       n,*idxout;
667   const PetscInt *idxin;
668 
669   PetscFunctionBegin;
670   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
671   PetscValidHeaderSpecific(is,IS_CLASSID,2);
672   PetscValidPointer(newis,3);
673 
674   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
675   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
676   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
677   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
678   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
679   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
680   PetscFunctionReturn(0);
681 }
682 
683 /*@
684    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
685    and converts them to the global numbering.
686 
687    Not collective
688 
689    Input Parameters:
690 +  mapping - the local to global mapping context
691 .  N - number of integers
692 -  in - input indices in local numbering
693 
694    Output Parameter:
695 .  out - indices in global numbering
696 
697    Notes:
698    The in and out array parameters may be identical.
699 
700    Level: advanced
701 
702 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
703           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
704           AOPetscToApplication(), ISGlobalToLocalMappingApply()
705 
706     Concepts: mapping^local to global
707 @*/
708 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
709 {
710   PetscInt i,bs,Nmax;
711 
712   PetscFunctionBegin;
713   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
714   bs   = mapping->bs;
715   Nmax = bs*mapping->n;
716   if (bs == 1) {
717     const PetscInt *idx = mapping->indices;
718     for (i=0; i<N; i++) {
719       if (in[i] < 0) {
720         out[i] = in[i];
721         continue;
722       }
723       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
724       out[i] = idx[in[i]];
725     }
726   } else {
727     const PetscInt *idx = mapping->indices;
728     for (i=0; i<N; i++) {
729       if (in[i] < 0) {
730         out[i] = in[i];
731         continue;
732       }
733       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
734       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
735     }
736   }
737   PetscFunctionReturn(0);
738 }
739 
740 /*@
741    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
742 
743    Not collective
744 
745    Input Parameters:
746 +  mapping - the local to global mapping context
747 .  N - number of integers
748 -  in - input indices in local block numbering
749 
750    Output Parameter:
751 .  out - indices in global block numbering
752 
753    Notes:
754    The in and out array parameters may be identical.
755 
756    Example:
757      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
758      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
759 
760    Level: advanced
761 
762 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
763           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
764           AOPetscToApplication(), ISGlobalToLocalMappingApply()
765 
766     Concepts: mapping^local to global
767 @*/
768 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
769 {
770 
771   PetscFunctionBegin;
772   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
773   {
774     PetscInt i,Nmax = mapping->n;
775     const PetscInt *idx = mapping->indices;
776 
777     for (i=0; i<N; i++) {
778       if (in[i] < 0) {
779         out[i] = in[i];
780         continue;
781       }
782       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
783       out[i] = idx[in[i]];
784     }
785   }
786   PetscFunctionReturn(0);
787 }
788 
789 /*@
790     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
791     specified with a global numbering.
792 
793     Not collective
794 
795     Input Parameters:
796 +   mapping - mapping between local and global numbering
797 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
798            IS_GTOLM_DROP - drops the indices with no local value from the output list
799 .   n - number of global indices to map
800 -   idx - global indices to map
801 
802     Output Parameters:
803 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
804 -   idxout - local index of each global index, one must pass in an array long enough
805              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
806              idxout == NULL to determine the required length (returned in nout)
807              and then allocate the required space and call ISGlobalToLocalMappingApply()
808              a second time to set the values.
809 
810     Notes:
811     Either nout or idxout may be NULL. idx and idxout may be identical.
812 
813     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
814     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
815     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
816 
817     Level: advanced
818 
819     Developer Note: The manual page states that idx and idxout may be identical but the calling
820        sequence declares idx as const so it cannot be the same as idxout.
821 
822     Concepts: mapping^global to local
823 
824 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
825           ISLocalToGlobalMappingDestroy()
826 @*/
827 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
828 {
829   PetscErrorCode ierr;
830 
831   PetscFunctionBegin;
832   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
833   if (!mapping->data) {
834     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
835   }
836   ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 /*@
841     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
842     a new index set using the local numbering defined in an ISLocalToGlobalMapping
843     context.
844 
845     Not collective
846 
847     Input Parameters:
848 +   mapping - mapping between local and global numbering
849 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
850            IS_GTOLM_DROP - drops the indices with no local value from the output list
851 -   is - index set in global numbering
852 
853     Output Parameters:
854 .   newis - index set in local numbering
855 
856     Level: advanced
857 
858     Concepts: mapping^local to global
859 
860 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
861           ISLocalToGlobalMappingDestroy()
862 @*/
863 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
864 {
865   PetscErrorCode ierr;
866   PetscInt       n,nout,*idxout;
867   const PetscInt *idxin;
868 
869   PetscFunctionBegin;
870   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
871   PetscValidHeaderSpecific(is,IS_CLASSID,3);
872   PetscValidPointer(newis,4);
873 
874   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
875   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
876   if (type == IS_GTOLM_MASK) {
877     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
878   } else {
879     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
880     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
881   }
882   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
883   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
884   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
885   PetscFunctionReturn(0);
886 }
887 
888 /*@
889     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
890     specified with a block global numbering.
891 
892     Not collective
893 
894     Input Parameters:
895 +   mapping - mapping between local and global numbering
896 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
897            IS_GTOLM_DROP - drops the indices with no local value from the output list
898 .   n - number of global indices to map
899 -   idx - global indices to map
900 
901     Output Parameters:
902 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
903 -   idxout - local index of each global index, one must pass in an array long enough
904              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
905              idxout == NULL to determine the required length (returned in nout)
906              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
907              a second time to set the values.
908 
909     Notes:
910     Either nout or idxout may be NULL. idx and idxout may be identical.
911 
912     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
913     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
914     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
915 
916     Level: advanced
917 
918     Developer Note: The manual page states that idx and idxout may be identical but the calling
919        sequence declares idx as const so it cannot be the same as idxout.
920 
921     Concepts: mapping^global to local
922 
923 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
924           ISLocalToGlobalMappingDestroy()
925 @*/
926 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
927                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
928 {
929   PetscErrorCode ierr;
930 
931   PetscFunctionBegin;
932   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
933   if (!mapping->data) {
934     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
935   }
936   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
937   PetscFunctionReturn(0);
938 }
939 
940 
941 /*@C
942     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
943      each index shared by more than one processor
944 
945     Collective on ISLocalToGlobalMapping
946 
947     Input Parameters:
948 .   mapping - the mapping from local to global indexing
949 
    Output Parameters:
951 +   nproc - number of processors that are connected to this one
952 .   proc - neighboring processors
953 .   numproc - number of indices for each subdomain (processor)
954 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
955 
956     Level: advanced
957 
958     Concepts: mapping^local to global
959 
960     Fortran Usage:
961 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
962 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
963           PetscInt indices[nproc][numprocmax],ierr)
964         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
965         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
966 
967 
968 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
969           ISLocalToGlobalMappingRestoreInfo()
970 @*/
971 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
972 {
973   PetscErrorCode ierr;
974 
975   PetscFunctionBegin;
976   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
977   if (mapping->info_cached) {
978     *nproc    = mapping->info_nproc;
979     *procs    = mapping->info_procs;
980     *numprocs = mapping->info_numprocs;
981     *indices  = mapping->info_indices;
982   } else {
983     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
984   }
985   PetscFunctionReturn(0);
986 }
987 
/*
   Computes, for this rank, the set of neighboring ranks that share at least one
   (block) index with it, and for each neighbor the list of shared local indices
   sorted by global number. Uses a three-phase MPI rendezvous: (1) each rank sends
   its (global index, local index) pairs to a "global owner" rank chosen by a
   uniform partition of the global index range; (2) owners tally which ranks touch
   each index; (3) owners send back, for every shared index, the full list of
   sharing ranks. The result is cached in the mapping for reuse.
   Collective on the mapping's communicator.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* sequential case: there are no neighbors; return empty (but allocated) arrays */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processor's subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* distinct tags for the three communication phases below */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = (an upper bound on) the global number of indices, from the local maxima */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* uniform partition of [0,Ng): rank r "globally owns" indices [r*scale, r*scale+ng) */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = #indices owned by rank p among ours; nprocs[2*p+1] = 1 if we message p */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    /* message i is unpacked at slot 2*nmax*i; entries are (global index, local index) pairs */
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[], consumed by the packing loop above */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  /* nownedsenders[g] = number of ranks whose subdomain contains owned index rstart+g */
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was consumed above; rebuild it before printing */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  /* bprocs[p] = compressed position of rank p in the output arrays */
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1397 
1398 /*@C
1399     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1400 
1401     Collective on ISLocalToGlobalMapping
1402 
1403     Input Parameters:
1404 .   mapping - the mapping from local to global indexing
1405 
    Output Parameters:
1407 +   nproc - number of processors that are connected to this one
1408 .   proc - neighboring processors
1409 .   numproc - number of indices for each processor
1410 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1411 
1412     Level: advanced
1413 
1414 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1415           ISLocalToGlobalMappingGetInfo()
1416 @*/
1417 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1418 {
1419   PetscErrorCode ierr;
1420 
1421   PetscFunctionBegin;
1422   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1423   if (mapping->info_free) {
1424     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1425     if (*indices) {
1426       PetscInt i;
1427 
1428       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1429       for (i=1; i<*nproc; i++) {
1430         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1431       }
1432       ierr = PetscFree(*indices);CHKERRQ(ierr);
1433     }
1434   }
1435   *nproc    = 0;
1436   *procs    = NULL;
1437   *numprocs = NULL;
1438   *indices  = NULL;
1439   PetscFunctionReturn(0);
1440 }
1441 
1442 /*@C
1443     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1444      each index shared by more than one processor
1445 
1446     Collective on ISLocalToGlobalMapping
1447 
1448     Input Parameters:
1449 .   mapping - the mapping from local to global indexing
1450 
    Output Parameters:
1452 +   nproc - number of processors that are connected to this one
1453 .   proc - neighboring processors
1454 .   numproc - number of indices for each subdomain (processor)
1455 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1456 
1457     Level: advanced
1458 
1459     Concepts: mapping^local to global
1460 
1461     Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo when the data is no longer needed.
1462 
1463     Fortran Usage:
1464 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1465 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1466           PetscInt indices[nproc][numprocmax],ierr)
1467         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1468         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1469 
1470 
1471 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1472           ISLocalToGlobalMappingRestoreInfo()
1473 @*/
1474 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1475 {
1476   PetscErrorCode ierr;
1477   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1478 
1479   PetscFunctionBegin;
1480   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1481   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1482   if (bs > 1) { /* we need to expand the cached info */
1483     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1484     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1485     for (i=0; i<*nproc; i++) {
1486       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1487       for (j=0; j<bnumprocs[i]; j++) {
1488         for (k=0; k<bs; k++) {
1489           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1490         }
1491       }
1492       (*numprocs)[i] = bnumprocs[i]*bs;
1493     }
1494     mapping->info_free = PETSC_TRUE;
1495   } else {
1496     *numprocs = bnumprocs;
1497     *indices  = bindices;
1498   }
1499   PetscFunctionReturn(0);
1500 }
1501 
1502 /*@C
1503     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1504 
1505     Collective on ISLocalToGlobalMapping
1506 
1507     Input Parameters:
1508 .   mapping - the mapping from local to global indexing
1509 
    Output Parameters:
1511 +   nproc - number of processors that are connected to this one
1512 .   proc - neighboring processors
1513 .   numproc - number of indices for each processor
1514 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1515 
1516     Level: advanced
1517 
1518 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1519           ISLocalToGlobalMappingGetInfo()
1520 @*/
1521 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1522 {
1523   PetscErrorCode ierr;
1524 
1525   PetscFunctionBegin;
1526   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1527   PetscFunctionReturn(0);
1528 }
1529 
1530 /*@C
1531     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1532 
1533     Collective on ISLocalToGlobalMapping
1534 
1535     Input Parameters:
1536 .   mapping - the mapping from local to global indexing
1537 
    Output Parameters:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1540 .   count - number of neighboring processors per node
1541 -   indices - indices of processes sharing the node (sorted)
1542 
1543     Level: advanced
1544 
1545     Concepts: mapping^local to global
1546 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1548 
1549 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1550           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1551 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    ierr = PetscCalloc1(n+1,&mapping->info_nodec);CHKERRQ(ierr); /* always allocate to flag setup */
    ierr = PetscMalloc1(n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: count ranks per node (1 for self, plus one per neighbor sharing it)
       to size a CSR-like layout; m accumulates the total number of entries */
    for (i=0,m=0;i<n;i++) { mapping->info_nodec[i] = 1; m++; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single backing allocation; info_nodei[i] are pointers into it at CSR offsets */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    ierr = PetscMemzero(mapping->info_nodec,n*sizeof(PetscInt));CHKERRQ(ierr);
    /* second pass: fill; every node first records neigh[0] (the entry GetBlockInfo
       swaps to the front, i.e. this rank).  NOTE(review): assumes n_neigh > 0
       whenever n > 0, otherwise neigh[0] would dereference NULL -- TODO confirm */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates; info_nodec shrinks accordingly */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all three output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1595 
1596 /*@C
1597     ISLocalToGlobalMappingRestoreNodeInfo - Frees the memory allocated by ISLocalToGlobalMappingGetNodeInfo()
1598 
1599     Collective on ISLocalToGlobalMapping
1600 
1601     Input Parameters:
1602 .   mapping - the mapping from local to global indexing
1603 
1604     Output Parameter:
1605 +   nnodes - number of local nodes
1606 .   count - number of neighboring processors per node
1607 -   indices - indices of processes sharing the node (sorted)
1608 
1609     Level: advanced
1610 
1611 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1612           ISLocalToGlobalMappingGetInfo()
1613 @*/
1614 PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
1615 {
1616   PetscFunctionBegin;
1617   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1618   if (nnodes)  *nnodes  = 0;
1619   if (count)   *count   = NULL;
1620   if (indices) *indices = NULL;
1621   PetscFunctionReturn(0);
1622 }
1623 
1624 /*@C
1625    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1626 
1627    Not Collective
1628 
1629    Input Arguments:
1630 . ltog - local to global mapping
1631 
1632    Output Arguments:
1633 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1634 
1635    Level: advanced
1636 
1637    Notes:
1638     ISLocalToGlobalMappingGetSize() returns the length the this array
1639 
1640 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1641 @*/
1642 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1643 {
1644   PetscFunctionBegin;
1645   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1646   PetscValidPointer(array,2);
1647   if (ltog->bs == 1) {
1648     *array = ltog->indices;
1649   } else {
1650     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1651     const PetscInt *ii;
1652     PetscErrorCode ierr;
1653 
1654     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1655     *array = jj;
1656     k    = 0;
1657     ii   = ltog->indices;
1658     for (i=0; i<n; i++)
1659       for (j=0; j<bs; j++)
1660         jj[k++] = bs*ii[i] + j;
1661   }
1662   PetscFunctionReturn(0);
1663 }
1664 
1665 /*@C
1666    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1667 
1668    Not Collective
1669 
1670    Input Arguments:
1671 + ltog - local to global mapping
1672 - array - array of indices
1673 
1674    Level: advanced
1675 
1676 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1677 @*/
1678 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1679 {
1680   PetscFunctionBegin;
1681   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1682   PetscValidPointer(array,2);
1683   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1684 
1685   if (ltog->bs > 1) {
1686     PetscErrorCode ierr;
1687     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1688   }
1689   PetscFunctionReturn(0);
1690 }
1691 
1692 /*@C
1693    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1694 
1695    Not Collective
1696 
1697    Input Arguments:
1698 . ltog - local to global mapping
1699 
1700    Output Arguments:
1701 . array - array of indices
1702 
1703    Level: advanced
1704 
1705 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1706 @*/
1707 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1708 {
1709   PetscFunctionBegin;
1710   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1711   PetscValidPointer(array,2);
1712   *array = ltog->indices;
1713   PetscFunctionReturn(0);
1714 }
1715 
1716 /*@C
1717    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1718 
1719    Not Collective
1720 
1721    Input Arguments:
1722 + ltog - local to global mapping
1723 - array - array of indices
1724 
1725    Level: advanced
1726 
1727 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1728 @*/
1729 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1730 {
1731   PetscFunctionBegin;
1732   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1733   PetscValidPointer(array,2);
1734   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1735   *array = NULL;
1736   PetscFunctionReturn(0);
1737 }
1738 
1739 /*@C
1740    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1741 
1742    Not Collective
1743 
1744    Input Arguments:
1745 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1746 . n - number of mappings to concatenate
1747 - ltogs - local to global mappings
1748 
1749    Output Arguments:
1750 . ltogcat - new mapping
1751 
1752    Note: this currently always returns a mapping with block size of 1
1753 
1754    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1755 
1756    Level: advanced
1757 
1758 .seealso: ISLocalToGlobalMappingCreate()
1759 @*/
1760 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1761 {
1762   PetscInt       i,cnt,m,*idx;
1763   PetscErrorCode ierr;
1764 
1765   PetscFunctionBegin;
1766   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1767   if (n > 0) PetscValidPointer(ltogs,3);
1768   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1769   PetscValidPointer(ltogcat,4);
1770   for (cnt=0,i=0; i<n; i++) {
1771     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1772     cnt += m;
1773   }
1774   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1775   for (cnt=0,i=0; i<n; i++) {
1776     const PetscInt *subidx;
1777     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1778     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1779     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1780     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1781     cnt += m;
1782   }
1783   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1784   PetscFunctionReturn(0);
1785 }
1786 
1787 /*MC
1788       ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1789                                     used this is good for only small and moderate size problems.
1790 
1791    Options Database Keys:
1792 +   -islocaltoglobalmapping_type basic - select this method
1793 
1794    Level: beginner
1795 
1796 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1797 M*/
1798 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
1799 {
1800   PetscFunctionBegin;
1801   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
1802   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
1803   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
1804   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
1805   PetscFunctionReturn(0);
1806 }
1807 
1808 /*MC
1809       ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
1810                                     used this is good for large memory problems.
1811 
1812    Options Database Keys:
1813 +   -islocaltoglobalmapping_type hash - select this method
1814 
1815 
1816    Notes:
1817     This is selected automatically for large problems if the user does not set the type.
1818 
1819    Level: beginner
1820 
1821 .seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
1822 M*/
1823 PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
1824 {
1825   PetscFunctionBegin;
1826   ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
1827   ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
1828   ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
1829   ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
1830   PetscFunctionReturn(0);
1831 }
1832 
1833 
1834 /*@C
1835     ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping
1836 
1837    Not Collective
1838 
1839    Input Parameters:
1840 +  sname - name of a new method
1841 -  routine_create - routine to create method context
1842 
1843    Notes:
1844    ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.
1845 
1846    Sample usage:
1847 .vb
1848    ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
1849 .ve
1850 
1851    Then, your mapping can be chosen with the procedural interface via
1852 $     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
1853    or at runtime via the option
1854 $     -islocaltoglobalmapping_type my_mapper
1855 
1856    Level: advanced
1857 
1858 .keywords: ISLocalToGlobalMappingType, register
1859 
1860 .seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
1861 
1862 @*/
1863 PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
1864 {
1865   PetscErrorCode ierr;
1866 
1867   PetscFunctionBegin;
1868   ierr = ISInitializePackage();CHKERRQ(ierr);
1869   ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
1870   PetscFunctionReturn(0);
1871 }
1872 
1873 /*@C
1874    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1875 
1876    Logically Collective on ISLocalToGlobalMapping
1877 
1878    Input Parameters:
1879 +  ltog - the ISLocalToGlobalMapping object
1880 -  type - a known method
1881 
1882    Options Database Key:
1883 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1884     of available methods (for instance, basic or hash)
1885 
1886    Notes:
1887    See "petsc/include/petscis.h" for available methods
1888 
1889   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1890   then set the ISLocalToGlobalMapping type from the options database rather than by using
1891   this routine.
1892 
1893   Level: intermediate
1894 
1895   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1896   are accessed by ISLocalToGlobalMappingSetType().
1897 
1898 .keywords: ISLocalToGlobalMapping, set, method
1899 
1900 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1901 
1902 @*/
1903 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1904 {
1905   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1906   PetscBool      match;
1907 
1908   PetscFunctionBegin;
1909   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1910   PetscValidCharPointer(type,2);
1911 
1912   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1913   if (match) PetscFunctionReturn(0);
1914 
1915   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1916   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1917   /* Destroy the previous private LTOG context */
1918   if (ltog->ops->destroy) {
1919     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1920     ltog->ops->destroy = NULL;
1921   }
1922   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1923   ierr = (*r)(ltog);CHKERRQ(ierr);
1924   PetscFunctionReturn(0);
1925 }
1926 
1927 PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;
1928 
1929 /*@C
1930   ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.
1931 
1932   Not Collective
1933 
1934   Level: advanced
1935 
1936 .keywords: IS, register, all
1937 .seealso:  ISRegister(),  ISLocalToGlobalRegister()
1938 @*/
1939 PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
1940 {
1941   PetscErrorCode ierr;
1942 
1943   PetscFunctionBegin;
1944   if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
1945   ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;
1946 
1947   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
1948   ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
1949   PetscFunctionReturn(0);
1950 }
1951 
1952