xref: /petsc/src/vec/is/utils/isltog.c (revision 36a9e3b9f6565ce1252c167e0dc4a4cf71b0f2ec)
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petsc/private/hashmapi.h>
4 #include <petscsf.h>
5 #include <petscviewer.h>
6 
/* Class id for ISLocalToGlobalMapping objects, registered by the IS package */
PetscClassId IS_LTOGM_CLASSID;
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);

/* Global-to-local data for the BASIC type: dense table indexed by (global - globalstart) */
typedef struct {
  PetscInt *globals;
} ISLocalToGlobalMapping_Basic;

/* Global-to-local data for the HASH type: hash map from global block index to local block index */
typedef struct {
  PetscHMapI globalht;
} ISLocalToGlobalMapping_Hash;
17 
18 
19 PetscErrorCode ISGetPointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
20 {
21   PetscInt       numCells, step = 1;
22   PetscBool      isStride;
23   PetscErrorCode ierr;
24 
25   PetscFunctionBeginHot;
26   *pStart = 0;
27   *points = NULL;
28   ierr = ISGetLocalSize(pointIS, &numCells);CHKERRQ(ierr);
29   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
30   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
31   *pEnd   = *pStart + numCells;
32   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
33   PetscFunctionReturn(0);
34 }
35 
36 PetscErrorCode ISRestorePointRange(IS pointIS, PetscInt *pStart, PetscInt *pEnd, const PetscInt **points)
37 {
38   PetscInt       step = 1;
39   PetscBool      isStride;
40   PetscErrorCode ierr;
41 
42   PetscFunctionBeginHot;
43   ierr = PetscObjectTypeCompare((PetscObject) pointIS, ISSTRIDE, &isStride);CHKERRQ(ierr);
44   if (isStride) {ierr = ISStrideGetInfo(pointIS, pStart, &step);CHKERRQ(ierr);}
45   if (!isStride || step != 1) {ierr = ISGetIndices(pointIS, points);CHKERRQ(ierr);}
46   PetscFunctionReturn(0);
47 }
48 
49 PetscErrorCode ISGetPointSubrange(IS subpointIS, PetscInt pStart, PetscInt pEnd, const PetscInt *points)
50 {
51   PetscErrorCode ierr;
52 
53   PetscFunctionBeginHot;
54   if (points) {
55     ierr = ISSetType(subpointIS, ISGENERAL);CHKERRQ(ierr);
56     ierr = ISGeneralSetIndices(subpointIS, pEnd-pStart, &points[pStart], PETSC_USE_POINTER);CHKERRQ(ierr);
57   } else {
58     ierr = ISSetType(subpointIS, ISSTRIDE);CHKERRQ(ierr);
59     ierr = ISStrideSetStride(subpointIS, pEnd-pStart, pStart, 1);CHKERRQ(ierr);
60   }
61   PetscFunctionReturn(0);
62 }
63 
64 /* -----------------------------------------------------------------------------------------*/
65 
66 /*
67     Creates the global mapping information in the ISLocalToGlobalMapping structure
68 
69     If the user has not selected how to handle the global to local mapping then use HASH for "large" problems
70 */
71 static PetscErrorCode ISGlobalToLocalMappingSetUp(ISLocalToGlobalMapping mapping)
72 {
73   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start;
74   PetscErrorCode ierr;
75 
76   PetscFunctionBegin;
77   if (mapping->data) PetscFunctionReturn(0);
78   end   = 0;
79   start = PETSC_MAX_INT;
80 
81   for (i=0; i<n; i++) {
82     if (idx[i] < 0) continue;
83     if (idx[i] < start) start = idx[i];
84     if (idx[i] > end)   end   = idx[i];
85   }
86   if (start > end) {start = 0; end = -1;}
87   mapping->globalstart = start;
88   mapping->globalend   = end;
89   if (!((PetscObject)mapping)->type_name) {
90     if ((end - start) > PetscMax(4*n,1000000)) {
91       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGHASH);CHKERRQ(ierr);
92     } else {
93       ierr = ISLocalToGlobalMappingSetType(mapping,ISLOCALTOGLOBALMAPPINGBASIC);CHKERRQ(ierr);
94     }
95   }
96   ierr = (*mapping->ops->globaltolocalmappingsetup)(mapping);CHKERRQ(ierr);
97   PetscFunctionReturn(0);
98 }
99 
100 static PetscErrorCode ISGlobalToLocalMappingSetUp_Basic(ISLocalToGlobalMapping mapping)
101 {
102   PetscErrorCode              ierr;
103   PetscInt                    i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
104   ISLocalToGlobalMapping_Basic *map;
105 
106   PetscFunctionBegin;
107   start            = mapping->globalstart;
108   end              = mapping->globalend;
109   ierr             = PetscNew(&map);CHKERRQ(ierr);
110   ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
111   map->globals     = globals;
112   for (i=0; i<end-start+1; i++) globals[i] = -1;
113   for (i=0; i<n; i++) {
114     if (idx[i] < 0) continue;
115     globals[idx[i] - start] = i;
116   }
117   mapping->data = (void*)map;
118   ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 static PetscErrorCode ISGlobalToLocalMappingSetUp_Hash(ISLocalToGlobalMapping mapping)
123 {
124   PetscErrorCode              ierr;
125   PetscInt                    i,*idx = mapping->indices,n = mapping->n;
126   ISLocalToGlobalMapping_Hash *map;
127 
128   PetscFunctionBegin;
129   ierr = PetscNew(&map);CHKERRQ(ierr);
130   ierr = PetscHMapICreate(&map->globalht);CHKERRQ(ierr);
131   for (i=0; i<n; i++ ) {
132     if (idx[i] < 0) continue;
133     ierr = PetscHMapISet(map->globalht,idx[i],i);CHKERRQ(ierr);
134   }
135   mapping->data = (void*)map;
136   ierr = PetscLogObjectMemory((PetscObject)mapping,2*n*sizeof(PetscInt));CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
140 static PetscErrorCode ISLocalToGlobalMappingDestroy_Basic(ISLocalToGlobalMapping mapping)
141 {
142   PetscErrorCode              ierr;
143   ISLocalToGlobalMapping_Basic *map  = (ISLocalToGlobalMapping_Basic *)mapping->data;
144 
145   PetscFunctionBegin;
146   if (!map) PetscFunctionReturn(0);
147   ierr = PetscFree(map->globals);CHKERRQ(ierr);
148   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
149   PetscFunctionReturn(0);
150 }
151 
152 static PetscErrorCode ISLocalToGlobalMappingDestroy_Hash(ISLocalToGlobalMapping mapping)
153 {
154   PetscErrorCode              ierr;
155   ISLocalToGlobalMapping_Hash *map  = (ISLocalToGlobalMapping_Hash*)mapping->data;
156 
157   PetscFunctionBegin;
158   if (!map) PetscFunctionReturn(0);
159   ierr = PetscHMapIDestroy(&map->globalht);CHKERRQ(ierr);
160   ierr = PetscFree(mapping->data);CHKERRQ(ierr);
161   PetscFunctionReturn(0);
162 }
163 
/*
  The type-specific implementations of ISGlobalToLocalMappingApply() and
  ISGlobalToLocalMappingApplyBlock() are generated by including isltog.h
  four times with different settings of these macros:

    GTOLTYPE - suffix of the data struct to cast mapping->data to (_Basic/_Hash)
    GTOLNAME - suffix of the generated function name
    GTOLBS   - block size the generated function operates with
               (mapping->bs for point indices, 1 for block indices)
    GTOL     - statement looking up one global index g and storing the
               local result in 'local'
*/
#define GTOLTYPE _Basic
#define GTOLNAME _Basic
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                  \
    local = map->globals[g/bs - start];      \
    local = bs*local + (g % bs);             \
  } while (0)

#include <../src/vec/is/utils/isltog.h>

/* Block variant of the Basic lookup: no block-size arithmetic on the result */
#define GTOLTYPE _Basic
#define GTOLNAME Block_Basic
#define GTOLBS 1
#define GTOL(g, local) do {                  \
    local = map->globals[g - start];         \
  } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Hash lookup of one point index; the PetscHMapIGet() error code is deliberately ignored */
#define GTOLTYPE _Hash
#define GTOLNAME _Hash
#define GTOLBS mapping->bs
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g/bs,&local); \
    local = bs*local + (g % bs);                    \
   } while (0)
#include <../src/vec/is/utils/isltog.h>

/* Block variant of the Hash lookup */
#define GTOLTYPE _Hash
#define GTOLNAME Block_Hash
#define GTOLBS 1
#define GTOL(g, local) do {                         \
    (void)PetscHMapIGet(map->globalht,g,&local);    \
  } while (0)
#include <../src/vec/is/utils/isltog.h>
198 
199 /*@
200     ISLocalToGlobalMappingDuplicate - Duplicates the local to global mapping object
201 
202     Not Collective
203 
204     Input Parameter:
205 .   ltog - local to global mapping
206 
207     Output Parameter:
208 .   nltog - the duplicated local to global mapping
209 
210     Level: advanced
211 
212     Concepts: mapping^local to global
213 
214 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
215 @*/
216 PetscErrorCode  ISLocalToGlobalMappingDuplicate(ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping* nltog)
217 {
218   PetscErrorCode ierr;
219 
220   PetscFunctionBegin;
221   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
222   ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)ltog),ltog->bs,ltog->n,ltog->indices,PETSC_COPY_VALUES,nltog);CHKERRQ(ierr);
223   PetscFunctionReturn(0);
224 }
225 
/*@
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* mapping->n counts blocks; scale by the block size to get the number of point indices */
  *n = mapping->bs*mapping->n;
  PetscFunctionReturn(0);
}
251 
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Default to stdout on the mapping's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* Synchronized printing: each rank prints "[rank] local global" for its own entries, in rank order */
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
294 
295 /*@
296     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
297     ordering and a global parallel ordering.
298 
299     Not collective
300 
301     Input Parameter:
302 .   is - index set containing the global numbers for each local number
303 
304     Output Parameter:
305 .   mapping - new mapping data structure
306 
307     Notes:
308     the block size of the IS determines the block size of the mapping
309     Level: advanced
310 
311     Concepts: mapping^local to global
312 
313 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetFromOptions()
314 @*/
315 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
316 {
317   PetscErrorCode ierr;
318   PetscInt       n,bs;
319   const PetscInt *indices;
320   MPI_Comm       comm;
321   PetscBool      isblock;
322 
323   PetscFunctionBegin;
324   PetscValidHeaderSpecific(is,IS_CLASSID,1);
325   PetscValidPointer(mapping,2);
326 
327   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
328   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
329   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
330   if (!isblock) {
331     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
332     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
333     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
334   } else {
335     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
336     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
337     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
338     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
339   }
340   PetscFunctionReturn(0);
341 }
342 
343 /*@C
344     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
345     ordering and a global parallel ordering.
346 
347     Collective
348 
349     Input Parameter:
350 +   sf - star forest mapping contiguous local indices to (rank, offset)
351 -   start - first global index on this process
352 
353     Output Parameter:
354 .   mapping - new mapping data structure
355 
356     Level: advanced
357 
358     Concepts: mapping^local to global
359 
360 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions()
361 @*/
362 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
363 {
364   PetscErrorCode ierr;
365   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
366   const PetscInt *ilocal;
367   MPI_Comm       comm;
368 
369   PetscFunctionBegin;
370   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
371   PetscValidPointer(mapping,3);
372 
373   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
374   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
375   if (ilocal) {
376     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
377   }
378   else maxlocal = nleaves;
379   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
380   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
381   for (i=0; i<nroots; i++) globals[i] = start + i;
382   for (i=0; i<maxlocal; i++) ltog[i] = -1;
383   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
384   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
385   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
386   ierr = PetscFree(globals);CHKERRQ(ierr);
387   PetscFunctionReturn(0);
388 }
389 
/*@
    ISLocalToGlobalMappingSetBlockSize - Sets the blocksize of the mapping

    Not collective

    Input Parameters:
.   mapping - mapping data structure
.   bs - the blocksize

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingSetBlockSize(ISLocalToGlobalMapping mapping,PetscInt bs)
{
  PetscInt       *nid;
  const PetscInt *oid;
  PetscInt       i,cn,on,obs,nn;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (bs < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid block size %D",bs);
  if (bs == mapping->bs) PetscFunctionReturn(0);
  on  = mapping->n;    /* old number of blocks */
  obs = mapping->bs;   /* old block size */
  oid = mapping->indices;
  nn  = (on*obs)/bs;   /* new number of blocks: total point indices divided by the new block size */
  if ((on*obs)%bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block size %D is inconsistent with block size %D and number of block indices %D",bs,obs,on);

  ierr = PetscMalloc1(nn,&nid);CHKERRQ(ierr);
  /* Expanded (point) indices of the current mapping, length on*obs */
  ierr = ISLocalToGlobalMappingGetIndices(mapping,&oid);CHKERRQ(ierr);
  for (i=0;i<nn;i++) {
    PetscInt j;
    /* Each new block must consist of bs consecutive point indices, or be entirely negative */
    for (j=0,cn=0;j<bs-1;j++) {
      if (oid[i*bs+j] < 0) { cn++; continue; }
      if (oid[i*bs+j] != oid[i*bs+j+1]-1) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: non consecutive indices %D %D",bs,obs,oid[i*bs+j],oid[i*bs+j+1]);
    }
    /* j == bs-1 after the loop: account for the last entry of the block */
    if (oid[i*bs+j] < 0) cn++;
    if (cn) {
      /* A block containing any negative entry must be all negative; it maps to -1 */
      if (cn != bs) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Block sizes %D and %D are incompatible with the block indices: invalid number of negative entries in block %D",bs,obs,cn);
      nid[i] = -1;
    } else {
      /* First point index of the block, converted to a block index under the new block size */
      nid[i] = oid[i*bs]/bs;
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(mapping,&oid);CHKERRQ(ierr);

  mapping->n           = nn;
  mapping->bs          = bs;
  ierr                 = PetscFree(mapping->indices);CHKERRQ(ierr);
  mapping->indices     = nid;
  mapping->globalstart = 0;
  mapping->globalend   = 0;

  /* reset the cached information */
  ierr = PetscFree(mapping->info_procs);CHKERRQ(ierr);
  ierr = PetscFree(mapping->info_numprocs);CHKERRQ(ierr);
  if (mapping->info_indices) {
    PetscInt i;

    ierr = PetscFree((mapping->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<mapping->info_nproc; i++) {
      ierr = PetscFree(mapping->info_indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(mapping->info_indices);CHKERRQ(ierr);
  }
  mapping->info_cached = PETSC_FALSE;

  /* Discard the type-specific global-to-local data; it is rebuilt on demand for the new block size */
  if (mapping->ops->destroy) {
    ierr = (*mapping->ops->destroy)(mapping);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
466 
467 /*@
468     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
469     ordering and a global parallel ordering.
470 
471     Not Collective
472 
473     Input Parameters:
474 .   mapping - mapping data structure
475 
476     Output Parameter:
477 .   bs - the blocksize
478 
479     Level: advanced
480 
481     Concepts: mapping^local to global
482 
483 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
484 @*/
485 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
486 {
487   PetscFunctionBegin;
488   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
489   *bs = mapping->bs;
490   PetscFunctionReturn(0);
491 }
492 
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes:
    There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingSetFromOptions(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH
          ISLocalToGlobalMappingSetType(), ISLocalToGlobalMappingType
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL when n is 0 */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  (*mapping)->info_nodec    = NULL;
  (*mapping)->info_nodei    = NULL;

  /* Type-specific operations are filled in by ISLocalToGlobalMappingSetType() */
  (*mapping)->ops->globaltolocalmappingapply      = NULL;
  (*mapping)->ops->globaltolocalmappingapplyblock = NULL;
  (*mapping)->ops->destroy                        = NULL;
  if (mode == PETSC_COPY_VALUES) {
    /* The mapping keeps its own copy of the indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* The mapping takes ownership of the caller's array and will free it */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
562 
/* Registry of ISLocalToGlobalMapping implementations, consulted by -islocaltoglobalmapping_type option processing */
PetscFunctionList ISLocalToGlobalMappingList = NULL;
564 
565 
/*@
   ISLocalToGlobalMappingSetFromOptions - Set mapping options from the options database.

   Not collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

@*/
PetscErrorCode ISLocalToGlobalMappingSetFromOptions(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode             ierr;
  char                       type[256];
  ISLocalToGlobalMappingType defaulttype = "Not set";
  PetscBool                  flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Make sure the registry of implementations is populated before offering the list option */
  ierr = ISLocalToGlobalMappingRegisterAll();CHKERRQ(ierr);
  ierr = PetscObjectOptionsBegin((PetscObject)mapping);CHKERRQ(ierr);
  /* Offer the current type (or "Not set") as the displayed default */
  ierr = PetscOptionsFList("-islocaltoglobalmapping_type","ISLocalToGlobalMapping method","ISLocalToGlobalMappingSetType",ISLocalToGlobalMappingList,(char*)(((PetscObject)mapping)->type_name) ? ((PetscObject)mapping)->type_name : defaulttype,type,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = ISLocalToGlobalMappingSetType(mapping,type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
595 
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* Only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  /* info_indices holds one array per neighbor process (info_nproc of them), plus the array of pointers */
  if ((*mapping)->info_indices) {
    PetscInt i;

    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  if ((*mapping)->info_nodei) {
    ierr = PetscFree(((*mapping)->info_nodei)[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*mapping)->info_nodei);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_nodec);CHKERRQ(ierr);
  /* Let the type-specific implementation release its global-to-local data */
  if ((*mapping)->ops->destroy) {
    ierr = (*(*mapping)->ops->destroy)(*mapping);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
641 
642 /*@
643     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
644     a new index set using the global numbering defined in an ISLocalToGlobalMapping
645     context.
646 
647     Collective on is
648 
649     Input Parameters:
650 +   mapping - mapping between local and global numbering
651 -   is - index set in local numbering
652 
653     Output Parameters:
654 .   newis - index set in global numbering
655 
656     Notes:
657     The output IS will have the same communicator of the input IS.
658 
659     Level: advanced
660 
661     Concepts: mapping^local to global
662 
663 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
664           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
665 @*/
666 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
667 {
668   PetscErrorCode ierr;
669   PetscInt       n,*idxout;
670   const PetscInt *idxin;
671 
672   PetscFunctionBegin;
673   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
674   PetscValidHeaderSpecific(is,IS_CLASSID,2);
675   PetscValidPointer(newis,3);
676 
677   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
678   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
679   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
680   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
681   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
682   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
683   PetscFunctionReturn(0);
684 }
685 
/*@
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

    Concepts: mapping^local to global
@*/
PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
{
  PetscInt i,bs,Nmax;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  bs   = mapping->bs;
  Nmax = bs*mapping->n;   /* total number of valid point indices */
  if (bs == 1) {
    /* Fast path: no block arithmetic needed */
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      /* Negative indices pass through unchanged */
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      out[i] = idx[in[i]];
    }
  } else {
    const PetscInt *idx = mapping->indices;
    for (i=0; i<N; i++) {
      if (in[i] < 0) {
        out[i] = in[i];
        continue;
      }
      if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
      /* idx stores block indices: scale by bs and add the within-block offset */
      out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
    }
  }
  PetscFunctionReturn(0);
}
742 
743 /*@
744    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering and converts them to the global block numbering
745 
746    Not collective
747 
748    Input Parameters:
749 +  mapping - the local to global mapping context
750 .  N - number of integers
751 -  in - input indices in local block numbering
752 
753    Output Parameter:
754 .  out - indices in global block numbering
755 
756    Notes:
757    The in and out array parameters may be identical.
758 
759    Example:
760      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
761      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
762 
763    Level: advanced
764 
765 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
766           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
767           AOPetscToApplication(), ISGlobalToLocalMappingApply()
768 
769     Concepts: mapping^local to global
770 @*/
771 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
772 {
773 
774   PetscFunctionBegin;
775   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
776   {
777     PetscInt i,Nmax = mapping->n;
778     const PetscInt *idx = mapping->indices;
779 
780     for (i=0; i<N; i++) {
781       if (in[i] < 0) {
782         out[i] = in[i];
783         continue;
784       }
785       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
786       out[i] = idx[in[i]];
787     }
788   }
789   PetscFunctionReturn(0);
790 }
791 
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
    this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
    Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* Lazily build the type-specific global-to-local data on first use */
  if (!mapping->data) {
    ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
  }
  /* Dispatch to the generated Basic/Hash implementation */
  ierr = (*mapping->ops->globaltolocalmappingapply)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
842 
843 /*@
844     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
845     a new index set using the local numbering defined in an ISLocalToGlobalMapping
846     context.
847 
848     Not collective
849 
850     Input Parameters:
851 +   mapping - mapping between local and global numbering
852 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
853            IS_GTOLM_DROP - drops the indices with no local value from the output list
854 -   is - index set in global numbering
855 
856     Output Parameters:
857 .   newis - index set in local numbering
858 
859     Notes:
860     The output IS will be sequential, as it encodes a purely local operation
861 
862     Level: advanced
863 
864     Concepts: mapping^local to global
865 
866 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
867           ISLocalToGlobalMappingDestroy()
868 @*/
869 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,IS is,IS *newis)
870 {
871   PetscErrorCode ierr;
872   PetscInt       n,nout,*idxout;
873   const PetscInt *idxin;
874 
875   PetscFunctionBegin;
876   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
877   PetscValidHeaderSpecific(is,IS_CLASSID,3);
878   PetscValidPointer(newis,4);
879 
880   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
881   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
882   if (type == IS_GTOLM_MASK) {
883     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
884   } else {
885     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
886     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
887   }
888   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
889   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
890   ierr = ISCreateGeneral(PETSC_COMM_SELF,nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
891   PetscFunctionReturn(0);
892 }
893 
894 /*@
895     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
896     specified with a block global numbering.
897 
898     Not collective
899 
900     Input Parameters:
901 +   mapping - mapping between local and global numbering
902 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
903            IS_GTOLM_DROP - drops the indices with no local value from the output list
904 .   n - number of global indices to map
905 -   idx - global indices to map
906 
907     Output Parameters:
908 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
909 -   idxout - local index of each global index, one must pass in an array long enough
910              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
911              idxout == NULL to determine the required length (returned in nout)
912              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
913              a second time to set the values.
914 
915     Notes:
916     Either nout or idxout may be NULL. idx and idxout may be identical.
917 
918     For "small" problems when using ISGlobalToLocalMappingApply() and ISGlobalToLocalMappingApplyBlock(), the ISLocalToGlobalMappingType of ISLOCALTOGLOBALMAPPINGBASIC will be used;
919     this uses more memory but is faster; this approach is not scalable for extremely large mappings. For large problems ISLOCALTOGLOBALMAPPINGHASH is used, this is scalable.
920     Use ISLocalToGlobalMappingSetType() or call ISLocalToGlobalMappingSetFromOptions() with the option -islocaltoglobalmapping_type <basic,hash> to control which is used.
921 
922     Level: advanced
923 
924     Developer Note: The manual page states that idx and idxout may be identical but the calling
925        sequence declares idx as const so it cannot be the same as idxout.
926 
927     Concepts: mapping^global to local
928 
929 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
930           ISLocalToGlobalMappingDestroy()
931 @*/
932 PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingMode type,
933                                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
934 {
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
939   if (!mapping->data) {
940     ierr = ISGlobalToLocalMappingSetUp(mapping);CHKERRQ(ierr);
941   }
942   ierr = (*mapping->ops->globaltolocalmappingapplyblock)(mapping,type,n,idx,nout,idxout);CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
946 
947 /*@C
948     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
949      each index shared by more than one processor
950 
951     Collective on ISLocalToGlobalMapping
952 
953     Input Parameters:
954 .   mapping - the mapping from local to global indexing
955 
956     Output Parameter:
957 +   nproc - number of processors that are connected to this one
958 .   proc - neighboring processors
959 .   numproc - number of indices for each subdomain (processor)
960 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
961 
962     Level: advanced
963 
964     Concepts: mapping^local to global
965 
966     Fortran Usage:
967 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
968 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
969           PetscInt indices[nproc][numprocmax],ierr)
970         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
971         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
972 
973 
974 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
975           ISLocalToGlobalMappingRestoreInfo()
976 @*/
977 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
978 {
979   PetscErrorCode ierr;
980 
981   PetscFunctionBegin;
982   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
983   if (mapping->info_cached) {
984     *nproc    = mapping->info_nproc;
985     *procs    = mapping->info_procs;
986     *numprocs = mapping->info_numprocs;
987     *indices  = mapping->info_indices;
988   } else {
989     ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
990   }
991   PetscFunctionReturn(0);
992 }
993 
/*
   Computes, via three rounds of point-to-point communication, the list of
   neighboring ranks and the (block) indices shared with each of them, then
   caches the result on the mapping object.  For the purposes of this routine
   only, ownership of the global index space is assigned in contiguous chunks
   of size 'scale' per rank.  All output arrays become owned by the mapping
   (info_cached is set), so later calls reuse them.
*/
static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
  PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  PetscInt       first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm;
  PetscBool      debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor case: no neighbors, but still allocate the (empty) arrays
       that callers and ISLocalToGlobalMappingRestoreBlockInfo() expect */
    *nproc         = 0;
    *procs         = NULL;
    ierr           = PetscNew(numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscNew(indices);CHKERRQ(ierr);
    (*indices)[0]  = NULL;
    /* save info for reuse */
    mapping->info_nproc = *nproc;
    mapping->info_procs = *procs;
    mapping->info_numprocs = *numprocs;
    mapping->info_indices = *indices;
    mapping->info_cached = PETSC_TRUE;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetBlockInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain)
  */
  /* three distinct tags, one per communication round below */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = 1 + largest global index referenced by any rank */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* each rank globally owns the contiguous chunk [rstart,rstart+ng); the last rank gets the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = count of our indices owned by rank p; nprocs[2*p+1] = 1 iff p owns any */
  ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);

  /* post receives for owned rows */
  ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners;
     each entry is a (global index, local position) pair */
  ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
  ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts[], which was consumed by the packing loop above */
  starts[0] = 0;
  for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];

  /* send the messages */
  ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
  cnt  = nrecvs;
  ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    len[imdex]    = len[imdex]/2;   /* number of (index,position) pairs in this message */
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts[] was consumed above; rebuild it for the printout */
    starts[0] = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
    }
  }
  nt = 0;
  for (i=0; i<nsends2; i++) nt += nprocs[i];

  ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);

  starts2[0] = 0;
  for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
  ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* wait on receives of lens */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* prefix-sum the lengths into receive offsets; nt = total length of round-3 data */
  starts3[0] = 0;
  nt         = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
    nt          += lens2[i];
  }
  if (nrecvs2) nt += lens2[nrecvs2-1];

  ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  if (nrecvs2) {
    ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
    ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
    ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs (reused) now counts, per rank, how many of our nodes it shares */
  ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
  for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
  ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt; /* bprocs maps a rank to its slot in the compressed neighbor list */
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(len);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  /* save info for reuse */
  mapping->info_nproc = *nproc;
  mapping->info_procs = *procs;
  mapping->info_numprocs = *numprocs;
  mapping->info_indices = *indices;
  mapping->info_cached = PETSC_TRUE;
  PetscFunctionReturn(0);
}
1403 
1404 /*@C
1405     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1406 
1407     Collective on ISLocalToGlobalMapping
1408 
1409     Input Parameters:
1410 .   mapping - the mapping from local to global indexing
1411 
1412     Output Parameter:
1413 +   nproc - number of processors that are connected to this one
1414 .   proc - neighboring processors
1415 .   numproc - number of indices for each processor
1416 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1417 
1418     Level: advanced
1419 
1420 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1421           ISLocalToGlobalMappingGetInfo()
1422 @*/
1423 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1424 {
1425   PetscErrorCode ierr;
1426 
1427   PetscFunctionBegin;
1428   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1429   if (mapping->info_free) {
1430     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1431     if (*indices) {
1432       PetscInt i;
1433 
1434       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1435       for (i=1; i<*nproc; i++) {
1436         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1437       }
1438       ierr = PetscFree(*indices);CHKERRQ(ierr);
1439     }
1440   }
1441   *nproc    = 0;
1442   *procs    = NULL;
1443   *numprocs = NULL;
1444   *indices  = NULL;
1445   PetscFunctionReturn(0);
1446 }
1447 
1448 /*@C
1449     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1450      each index shared by more than one processor
1451 
1452     Collective on ISLocalToGlobalMapping
1453 
1454     Input Parameters:
1455 .   mapping - the mapping from local to global indexing
1456 
1457     Output Parameter:
1458 +   nproc - number of processors that are connected to this one
1459 .   proc - neighboring processors
1460 .   numproc - number of indices for each subdomain (processor)
1461 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1462 
1463     Level: advanced
1464 
1465     Concepts: mapping^local to global
1466 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreInfo() when the data is no longer needed.
1468 
1469     Fortran Usage:
1470 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1471 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1472           PetscInt indices[nproc][numprocmax],ierr)
1473         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1474         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1475 
1476 
1477 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1478           ISLocalToGlobalMappingRestoreInfo()
1479 @*/
1480 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1481 {
1482   PetscErrorCode ierr;
1483   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1484 
1485   PetscFunctionBegin;
1486   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1487   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1488   if (bs > 1) { /* we need to expand the cached info */
1489     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1490     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1491     for (i=0; i<*nproc; i++) {
1492       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1493       for (j=0; j<bnumprocs[i]; j++) {
1494         for (k=0; k<bs; k++) {
1495           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1496         }
1497       }
1498       (*numprocs)[i] = bnumprocs[i]*bs;
1499     }
1500     mapping->info_free = PETSC_TRUE;
1501   } else {
1502     *numprocs = bnumprocs;
1503     *indices  = bindices;
1504   }
1505   PetscFunctionReturn(0);
1506 }
1507 
1508 /*@C
1509     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1510 
1511     Collective on ISLocalToGlobalMapping
1512 
1513     Input Parameters:
1514 .   mapping - the mapping from local to global indexing
1515 
1516     Output Parameter:
1517 +   nproc - number of processors that are connected to this one
1518 .   proc - neighboring processors
1519 .   numproc - number of indices for each processor
1520 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1521 
1522     Level: advanced
1523 
1524 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1525           ISLocalToGlobalMappingGetInfo()
1526 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* The point-level arrays are either aliases of the cached block arrays (bs == 1)
     or flagged via mapping->info_free (bs > 1); the block restore routine handles
     both cases, so simply delegate. */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1535 
1536 /*@C
1537     ISLocalToGlobalMappingGetNodeInfo - Gets the neighbor information for each node
1538 
1539     Collective on ISLocalToGlobalMapping
1540 
1541     Input Parameters:
1542 .   mapping - the mapping from local to global indexing
1543 
1544     Output Parameter:
+   nnodes - number of local nodes (same as returned by ISLocalToGlobalMappingGetSize())
1546 .   count - number of neighboring processors per node
1547 -   indices - indices of processes sharing the node (sorted)
1548 
1549     Level: advanced
1550 
1551     Concepts: mapping^local to global
1552 
    Notes: The user needs to call ISLocalToGlobalMappingRestoreNodeInfo() when the data is no longer needed.
1554 
1555 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1556           ISLocalToGlobalMappingGetInfo(), ISLocalToGlobalMappingRestoreNodeInfo()
1557 @*/
PetscErrorCode  ISLocalToGlobalMappingGetNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscInt       n;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  ierr = ISLocalToGlobalMappingGetSize(mapping,&n);CHKERRQ(ierr);
  if (!mapping->info_nodec) {
    PetscInt i,m,n_neigh,*neigh,*n_shared,**shared;

    /* Build a CSR-like structure: info_nodec[i] = number of sharing ranks for
       local node i, info_nodei[i] = pointer into one contiguous rank array. */
    ierr = PetscCalloc1(n+1,&mapping->info_nodec);CHKERRQ(ierr); /* always allocate to flag setup */
    ierr = PetscMalloc1(n,&mapping->info_nodei);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
    /* first pass: every node is shared with at least self; m = total entries */
    for (i=0,m=0;i<n;i++) { mapping->info_nodec[i] = 1; m++; }
    /* neighbors start at index 1: GetInfo places this rank's own entry first */
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      m += n_shared[i];
      for (j=0;j<n_shared[i];j++) mapping->info_nodec[shared[i][j]] += 1;
    }
    /* single allocation backs all per-node rank lists; set up the row pointers */
    if (n) { ierr = PetscMalloc1(m,&mapping->info_nodei[0]);CHKERRQ(ierr); }
    for (i=1;i<n;i++) mapping->info_nodei[i] = mapping->info_nodei[i-1] + mapping->info_nodec[i-1];
    /* reset the counters; refill them while scattering the ranks into place */
    ierr = PetscMemzero(mapping->info_nodec,n*sizeof(PetscInt));CHKERRQ(ierr);
    /* NOTE(review): assumes n_neigh >= 1 whenever n > 0 (self always present as
       neigh[0]); otherwise neigh[0] below would dereference a NULL array — confirm */
    for (i=0;i<n;i++) { mapping->info_nodec[i] = 1; mapping->info_nodei[i][0] = neigh[0]; }
    for (i=1;i<n_neigh;i++) {
      PetscInt j;

      for (j=0;j<n_shared[i];j++) {
        PetscInt k = shared[i][j];

        mapping->info_nodei[k][mapping->info_nodec[k]] = neigh[i];
        mapping->info_nodec[k] += 1;
      }
    }
    /* sort each node's rank list and drop duplicates (adjusts info_nodec) */
    for (i=0;i<n;i++) { ierr = PetscSortRemoveDupsInt(&mapping->info_nodec[i],mapping->info_nodei[i]);CHKERRQ(ierr); }
    ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&n_neigh,&neigh,&n_shared,&shared);CHKERRQ(ierr);
  }
  /* all output arguments are optional */
  if (nnodes)  *nnodes  = n;
  if (count)   *count   = mapping->info_nodec;
  if (indices) *indices = mapping->info_nodei;
  PetscFunctionReturn(0);
}
1601 
/*@C
    ISLocalToGlobalMappingRestoreNodeInfo - Restores the data obtained with ISLocalToGlobalMappingGetNodeInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nnodes - number of local nodes
.   count - number of neighboring processors per node
-   indices - indices of processes sharing the node (sorted)

    Level: advanced

    Notes: This routine only zeroes the caller's pointers; the arrays themselves stay cached
    on the mapping (presumably released when the mapping is destroyed -- NOTE(review): confirm
    against ISLocalToGlobalMappingDestroy()).

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreNodeInfo(ISLocalToGlobalMapping mapping,PetscInt *nnodes,PetscInt *count[],PetscInt **indices[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* invalidate the caller's copies; no memory is freed here */
  if (nnodes)  *nnodes  = 0;
  if (count)   *count   = NULL;
  if (indices) *indices = NULL;
  PetscFunctionReturn(0);
}
1629 
1630 /*@C
1631    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1632 
1633    Not Collective
1634 
1635    Input Arguments:
1636 . ltog - local to global mapping
1637 
1638    Output Arguments:
1639 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1640 
1641    Level: advanced
1642 
1643    Notes:
1644     ISLocalToGlobalMappingGetSize() returns the length the this array
1645 
1646 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1647 @*/
1648 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1649 {
1650   PetscFunctionBegin;
1651   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1652   PetscValidPointer(array,2);
1653   if (ltog->bs == 1) {
1654     *array = ltog->indices;
1655   } else {
1656     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1657     const PetscInt *ii;
1658     PetscErrorCode ierr;
1659 
1660     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1661     *array = jj;
1662     k    = 0;
1663     ii   = ltog->indices;
1664     for (i=0; i<n; i++)
1665       for (j=0; j<bs; j++)
1666         jj[k++] = bs*ii[i] + j;
1667   }
1668   PetscFunctionReturn(0);
1669 }
1670 
1671 /*@C
1672    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1673 
1674    Not Collective
1675 
1676    Input Arguments:
1677 + ltog - local to global mapping
1678 - array - array of indices
1679 
1680    Level: advanced
1681 
1682 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1683 @*/
1684 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1685 {
1686   PetscFunctionBegin;
1687   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1688   PetscValidPointer(array,2);
1689   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1690 
1691   if (ltog->bs > 1) {
1692     PetscErrorCode ierr;
1693     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1694   }
1695   PetscFunctionReturn(0);
1696 }
1697 
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* returns the mapping's internal block-index array directly; no copy is made, ownership stays with ltog */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1721 
/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+ ltog - local to global mapping
- array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get call handed out the internal array, so the same pointer must come back; nothing is freed */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}
1744 
1745 /*@C
1746    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1747 
1748    Not Collective
1749 
1750    Input Arguments:
1751 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1752 . n - number of mappings to concatenate
1753 - ltogs - local to global mappings
1754 
1755    Output Arguments:
1756 . ltogcat - new mapping
1757 
1758    Note: this currently always returns a mapping with block size of 1
1759 
1760    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1761 
1762    Level: advanced
1763 
1764 .seealso: ISLocalToGlobalMappingCreate()
1765 @*/
1766 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1767 {
1768   PetscInt       i,cnt,m,*idx;
1769   PetscErrorCode ierr;
1770 
1771   PetscFunctionBegin;
1772   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1773   if (n > 0) PetscValidPointer(ltogs,3);
1774   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1775   PetscValidPointer(ltogcat,4);
1776   for (cnt=0,i=0; i<n; i++) {
1777     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1778     cnt += m;
1779   }
1780   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1781   for (cnt=0,i=0; i<n; i++) {
1782     const PetscInt *subidx;
1783     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1784     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1785     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1786     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1787     cnt += m;
1788   }
1789   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1790   PetscFunctionReturn(0);
1791 }
1792 
/*MC
      ISLOCALTOGLOBALMAPPINGBASIC - basic implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
                                    used this is good for only small and moderate size problems.

   Options Database Keys:
.   -islocaltoglobalmapping_type basic - select this method

   Level: beginner

.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGHASH
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Basic(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  /* install the Basic implementation's function table; private data is built lazily by the setup routine */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Basic;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Basic;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Basic;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Basic;
  PetscFunctionReturn(0);
}
1813 
/*MC
      ISLOCALTOGLOBALMAPPINGHASH - hash implementation of the ISLocalToGlobalMapping object. When ISGlobalToLocalMappingApply() is
                                    used this is good for large memory problems.

   Options Database Keys:
.   -islocaltoglobalmapping_type hash - select this method


   Notes:
    This is selected automatically for large problems if the user does not set the type.

   Level: beginner

.seealso:  ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingSetType(), ISLOCALTOGLOBALMAPPINGBASIC
M*/
PETSC_EXTERN PetscErrorCode ISLocalToGlobalMappingCreate_Hash(ISLocalToGlobalMapping ltog)
{
  PetscFunctionBegin;
  /* install the Hash implementation's function table; the hash table is built lazily by the setup routine */
  ltog->ops->globaltolocalmappingapply      = ISGlobalToLocalMappingApply_Hash;
  ltog->ops->globaltolocalmappingsetup      = ISGlobalToLocalMappingSetUp_Hash;
  ltog->ops->globaltolocalmappingapplyblock = ISGlobalToLocalMappingApplyBlock_Hash;
  ltog->ops->destroy                        = ISLocalToGlobalMappingDestroy_Hash;
  PetscFunctionReturn(0);
}
1838 
1839 
/*@C
    ISLocalToGlobalMappingRegister -  Adds a method for applying a global to local mapping with an ISLocalToGlobalMapping

   Not Collective

   Input Parameters:
+  sname - name of a new method
-  routine_create - routine to create method context

   Notes:
   ISLocalToGlobalMappingRegister() may be called multiple times to add several user-defined mappings.

   Sample usage:
.vb
   ISLocalToGlobalMappingRegister("my_mapper",MyCreate);
.ve

   Then, your mapping can be chosen with the procedural interface via
$     ISLocalToGlobalMappingSetType(ltog,"my_mapper")
   or at runtime via the option
$     -islocaltoglobalmapping_type my_mapper

   Level: advanced

.keywords: ISLocalToGlobalMappingType, register

.seealso: ISLocalToGlobalMappingRegisterAll(), ISLocalToGlobalMappingRegisterDestroy(), ISLOCALTOGLOBALMAPPINGBASIC, ISLOCALTOGLOBALMAPPINGHASH

@*/
PetscErrorCode  ISLocalToGlobalMappingRegister(const char sname[],PetscErrorCode (*function)(ISLocalToGlobalMapping))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* make sure the IS package (and its function lists) is initialized before registering */
  ierr = ISInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&ISLocalToGlobalMappingList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1878 
1879 /*@C
1880    ISLocalToGlobalMappingSetType - Builds ISLocalToGlobalMapping for a particular global to local mapping approach.
1881 
1882    Logically Collective on ISLocalToGlobalMapping
1883 
1884    Input Parameters:
1885 +  ltog - the ISLocalToGlobalMapping object
1886 -  type - a known method
1887 
1888    Options Database Key:
1889 .  -islocaltoglobalmapping_type  <method> - Sets the method; use -help for a list
1890     of available methods (for instance, basic or hash)
1891 
1892    Notes:
1893    See "petsc/include/petscis.h" for available methods
1894 
1895   Normally, it is best to use the ISLocalToGlobalMappingSetFromOptions() command and
1896   then set the ISLocalToGlobalMapping type from the options database rather than by using
1897   this routine.
1898 
1899   Level: intermediate
1900 
1901   Developer Note: ISLocalToGlobalMappingRegister() is used to add new types to ISLocalToGlobalMappingList from which they
1902   are accessed by ISLocalToGlobalMappingSetType().
1903 
1904 .keywords: ISLocalToGlobalMapping, set, method
1905 
1906 .seealso: PCSetType(), ISLocalToGlobalMappingType, ISLocalToGlobalMappingRegister(), ISLocalToGlobalMappingCreate()
1907 
1908 @*/
1909 PetscErrorCode  ISLocalToGlobalMappingSetType(ISLocalToGlobalMapping ltog, ISLocalToGlobalMappingType type)
1910 {
1911   PetscErrorCode ierr,(*r)(ISLocalToGlobalMapping);
1912   PetscBool      match;
1913 
1914   PetscFunctionBegin;
1915   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1916   PetscValidCharPointer(type,2);
1917 
1918   ierr = PetscObjectTypeCompare((PetscObject)ltog,type,&match);CHKERRQ(ierr);
1919   if (match) PetscFunctionReturn(0);
1920 
1921   ierr =  PetscFunctionListFind(ISLocalToGlobalMappingList,type,&r);CHKERRQ(ierr);
1922   if (!r) SETERRQ1(PetscObjectComm((PetscObject)ltog),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested ISLocalToGlobalMapping type %s",type);
1923   /* Destroy the previous private LTOG context */
1924   if (ltog->ops->destroy) {
1925     ierr              = (*ltog->ops->destroy)(ltog);CHKERRQ(ierr);
1926     ltog->ops->destroy = NULL;
1927   }
1928   ierr = PetscObjectChangeTypeName((PetscObject)ltog,type);CHKERRQ(ierr);
1929   ierr = (*r)(ltog);CHKERRQ(ierr);
1930   PetscFunctionReturn(0);
1931 }
1932 
/* guards against registering the built-in types more than once */
PetscBool ISLocalToGlobalMappingRegisterAllCalled = PETSC_FALSE;

/*@C
  ISLocalToGlobalMappingRegisterAll - Registers all of the local to global mapping components in the IS package.

  Not Collective

  Level: advanced

.keywords: IS, register, all
.seealso:  ISRegister(),  ISLocalToGlobalMappingRegister()
@*/
PetscErrorCode  ISLocalToGlobalMappingRegisterAll(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* idempotent: registering is done at most once per process */
  if (ISLocalToGlobalMappingRegisterAllCalled) PetscFunctionReturn(0);
  ISLocalToGlobalMappingRegisterAllCalled = PETSC_TRUE;

  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGBASIC, ISLocalToGlobalMappingCreate_Basic);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRegister(ISLOCALTOGLOBALMAPPINGHASH, ISLocalToGlobalMappingCreate_Hash);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1957 
1958