xref: /petsc/src/vec/is/utils/isltog.c (revision ba1e01c44f2f740c97c9026fe63e360558b7709b) !
1 
2 #include <petsc/private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping,PetscInt*,PetscInt**,PetscInt**,PetscInt***);
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
11 /*@
12     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
13 
14     Not Collective
15 
16     Input Parameter:
17 .   ltog - local to global mapping
18 
19     Output Parameter:
20 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
21 
22     Level: advanced
23 
24     Concepts: mapping^local to global
25 
26 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
27 @*/
28 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
29 {
30   PetscFunctionBegin;
31   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
32   PetscValidIntPointer(n,2);
33   *n = mapping->bs*mapping->n;
34   PetscFunctionReturn(0);
35 }
36 
37 #undef __FUNCT__
38 #define __FUNCT__ "ISLocalToGlobalMappingView"
39 /*@C
40     ISLocalToGlobalMappingView - View a local to global mapping
41 
42     Not Collective
43 
44     Input Parameters:
45 +   ltog - local to global mapping
46 -   viewer - viewer
47 
48     Level: advanced
49 
50     Concepts: mapping^local to global
51 
52 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
53 @*/
54 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
55 {
56   PetscInt       i;
57   PetscMPIInt    rank;
58   PetscBool      iascii;
59   PetscErrorCode ierr;
60 
61   PetscFunctionBegin;
62   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
63   if (!viewer) {
64     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
65   }
66   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
67 
68   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
69   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
70   if (iascii) {
71     ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
72     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
73     for (i=0; i<mapping->n; i++) {
74       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
75     }
76     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
77     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
78   }
79   PetscFunctionReturn(0);
80 }
81 
82 #undef __FUNCT__
83 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
84 /*@
85     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
86     ordering and a global parallel ordering.
87 
88     Not collective
89 
90     Input Parameter:
91 .   is - index set containing the global numbers for each local number
92 
93     Output Parameter:
94 .   mapping - new mapping data structure
95 
96     Notes: the block size of the IS determines the block size of the mapping
97     Level: advanced
98 
99     Concepts: mapping^local to global
100 
101 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
102 @*/
103 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
104 {
105   PetscErrorCode ierr;
106   PetscInt       n,bs;
107   const PetscInt *indices;
108   MPI_Comm       comm;
109   PetscBool      isblock;
110 
111   PetscFunctionBegin;
112   PetscValidHeaderSpecific(is,IS_CLASSID,1);
113   PetscValidPointer(mapping,2);
114 
115   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
116   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
117   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
118   if (!isblock) {
119     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
120     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
121     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
122   } else {
123     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
124     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
125     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
126     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
127   }
128   PetscFunctionReturn(0);
129 }
130 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameter:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  /* size of the local space: when the SF supplies explicit leaf locations, take
     one past the largest leaf index; otherwise leaves are contiguous from 0 */
  if (ilocal) {
    for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
  }
  else maxlocal = nleaves;
  ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
  ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
  /* roots own the contiguous global range [start, start+nroots) */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* pre-fill with -1 so local slots not touched by the broadcast stay unmapped */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  /* pull each root's global index down to its leaves */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* the mapping takes ownership of ltog (PETSC_OWN_POINTER); only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
179 
180 #undef __FUNCT__
181 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
182 /*@
183     ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
184     ordering and a global parallel ordering.
185 
186     Not Collective
187 
188     Input Parameters:
189 .   mapping - mapping data structure
190 
191     Output Parameter:
192 .   bs - the blocksize
193 
194     Level: advanced
195 
196     Concepts: mapping^local to global
197 
198 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
199 @*/
200 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
201 {
202   PetscFunctionBegin;
203   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
204   *bs = mapping->bs;
205   PetscFunctionReturn(0);
206 }
207 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   bs - the block size
.   n - the number of local elements divided by the block size, or equivalently the number of block indices
.   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm comm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no local entries */
  /* NOTE(review): the argument numbers here (3 and 4) do not match the actual
     parameter positions of indices (4) and mapping (6) — verify upstream */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = NULL;
  ierr = ISInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(*mapping,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           comm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n             = n;
  (*mapping)->bs            = bs;
  /* neighbor-info cache starts empty; it is filled lazily by GetBlockInfo */
  (*mapping)->info_cached   = PETSC_FALSE;
  (*mapping)->info_free     = PETSC_FALSE;
  (*mapping)->info_procs    = NULL;
  (*mapping)->info_numprocs = NULL;
  (*mapping)->info_indices  = NULL;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  } else if (mode == PETSC_OWN_POINTER) {
    /* adopt the caller's array; it will be freed in ISLocalToGlobalMappingDestroy() */
    (*mapping)->indices = (PetscInt*)indices;
    ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
  }
  else SETERRQ(comm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  ierr = PetscStrallocpy("basic",&((PetscObject)*mapping)->type_name);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
272 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  /* release the cached neighbor information (PetscFree is a no-op on NULL) */
  ierr = PetscFree((*mapping)->info_procs);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->info_numprocs);CHKERRQ(ierr);
  if ((*mapping)->info_indices) {
    PetscInt i;

    /* free each per-process index array, then the array of pointers itself */
    ierr = PetscFree(((*mapping)->info_indices)[0]);CHKERRQ(ierr);
    for (i=1; i<(*mapping)->info_nproc; i++) {
      ierr = PetscFree(((*mapping)->info_indices)[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree((*mapping)->info_indices);CHKERRQ(ierr);
  }
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
313 
314 #undef __FUNCT__
315 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
316 /*@
317     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
318     a new index set using the global numbering defined in an ISLocalToGlobalMapping
319     context.
320 
321     Not collective
322 
323     Input Parameters:
324 +   mapping - mapping between local and global numbering
325 -   is - index set in local numbering
326 
327     Output Parameters:
328 .   newis - index set in global numbering
329 
330     Level: advanced
331 
332     Concepts: mapping^local to global
333 
334 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
335           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
336 @*/
337 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
338 {
339   PetscErrorCode ierr;
340   PetscInt       n,*idxout;
341   const PetscInt *idxin;
342 
343   PetscFunctionBegin;
344   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
345   PetscValidHeaderSpecific(is,IS_CLASSID,2);
346   PetscValidPointer(newis,3);
347 
348   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
349   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
350   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
351   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
352   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
353   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
354   PetscFunctionReturn(0);
355 }
356 
357 #undef __FUNCT__
358 #define __FUNCT__ "ISLocalToGlobalMappingApply"
359 /*@
360    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
361    and converts them to the global numbering.
362 
363    Not collective
364 
365    Input Parameters:
366 +  mapping - the local to global mapping context
367 .  N - number of integers
368 -  in - input indices in local numbering
369 
370    Output Parameter:
371 .  out - indices in global numbering
372 
373    Notes:
374    The in and out array parameters may be identical.
375 
376    Level: advanced
377 
378 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
379           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
380           AOPetscToApplication(), ISGlobalToLocalMappingApply()
381 
382     Concepts: mapping^local to global
383 @*/
384 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
385 {
386   PetscInt i,bs,Nmax;
387 
388   PetscFunctionBegin;
389   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
390   bs   = mapping->bs;
391   Nmax = bs*mapping->n;
392   if (bs == 1) {
393     const PetscInt *idx = mapping->indices;
394     for (i=0; i<N; i++) {
395       if (in[i] < 0) {
396         out[i] = in[i];
397         continue;
398       }
399       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
400       out[i] = idx[in[i]];
401     }
402   } else {
403     const PetscInt *idx = mapping->indices;
404     for (i=0; i<N; i++) {
405       if (in[i] < 0) {
406         out[i] = in[i];
407         continue;
408       }
409       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
410       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
411     }
412   }
413   PetscFunctionReturn(0);
414 }
415 
416 #undef __FUNCT__
417 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
418 /*@
419    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
420 
421    Not collective
422 
423    Input Parameters:
424 +  mapping - the local to global mapping context
425 .  N - number of integers
426 -  in - input indices in local block numbering
427 
428    Output Parameter:
429 .  out - indices in global block numbering
430 
431    Notes:
432    The in and out array parameters may be identical.
433 
434    Example:
435      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
436      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
437 
438    Level: advanced
439 
440 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
441           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
442           AOPetscToApplication(), ISGlobalToLocalMappingApply()
443 
444     Concepts: mapping^local to global
445 @*/
446 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
447 {
448 
449   PetscFunctionBegin;
450   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
451   {
452     PetscInt i,Nmax = mapping->n;
453     const PetscInt *idx = mapping->indices;
454 
455     for (i=0; i<N; i++) {
456       if (in[i] < 0) {
457         out[i] = in[i];
458         continue;
459       }
460       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
461       out[i] = idx[in[i]];
462     }
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 /* -----------------------------------------------------------------------------------------*/
468 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure

    Builds mapping->globals, a dense inverse lookup table over the range
    [globalstart, globalend] so that globals[g - globalstart] gives the local
    index owning global index g, or -1 if no local index maps to g.
    Negative entries in mapping->indices are skipped.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  /* find the min/max nonnegative global indices present in the mapping */
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  /* no nonnegative entries at all: use an empty range so the table has size 1 */
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  ierr             = PetscMalloc1(end-start+2,&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  /* -1 marks global indices in the range with no local owner */
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
503 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                            PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local lookup table */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  /* start/end are in block numbering; incoming idx values are unblocked,
     hence the bs scaling in every comparison and lookup below */
  start   = mapping->globalstart;
  end     = mapping->globalend;
  bs      = mapping->bs;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output is the same length as the input; unmapped entries become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0)                   idxout[i] = idx[i];
        else if (idx[i] < bs*start)       idxout[i] = -1;
        else if (idx[i] > bs*(end+1)-1)   idxout[i] = -1;
        else                              idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: only entries with a local value are emitted (or counted when idxout is NULL) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting pass: same filters as above, no stores */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < bs*start) continue;
        if (idx[i] > bs*(end+1)-1) continue;
        tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
593 
594 #undef __FUNCT__
595 #define __FUNCT__ "ISGlobalToLocalMappingApplyIS"
596 /*@
597     ISGlobalToLocalMappingApplyIS - Creates from an IS in the global numbering
598     a new index set using the local numbering defined in an ISLocalToGlobalMapping
599     context.
600 
601     Not collective
602 
603     Input Parameters:
604 +   mapping - mapping between local and global numbering
605 -   is - index set in global numbering
606 
607     Output Parameters:
608 .   newis - index set in local numbering
609 
610     Level: advanced
611 
612     Concepts: mapping^local to global
613 
614 .seealso: ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
615           ISLocalToGlobalMappingDestroy()
616 @*/
617 PetscErrorCode  ISGlobalToLocalMappingApplyIS(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type, IS is,IS *newis)
618 {
619   PetscErrorCode ierr;
620   PetscInt       n,nout,*idxout;
621   const PetscInt *idxin;
622 
623   PetscFunctionBegin;
624   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
625   PetscValidHeaderSpecific(is,IS_CLASSID,3);
626   PetscValidPointer(newis,4);
627 
628   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
629   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
630   if (type == IS_GTOLM_MASK) {
631     ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
632   } else {
633     ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,NULL);CHKERRQ(ierr);
634     ierr = PetscMalloc1(nout,&idxout);CHKERRQ(ierr);
635   }
636   ierr = ISGlobalToLocalMappingApply(mapping,type,n,idxin,&nout,idxout);CHKERRQ(ierr);
637   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
638   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),nout,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
639   PetscFunctionReturn(0);
640 }
641 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
/*@
    ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
    specified with a block global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
             idxout == NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
             a second time to set the values.

    Notes:
    Either nout or idxout may be NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Developer Note: The manual page states that idx and idxout may be identical but the calling
       sequence declares idx as const so it cannot be the same as idxout.

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* lazily build the dense global->local lookup table */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  /* unlike ISGlobalToLocalMappingApply(), everything here is already in block
     numbering, so no bs scaling is needed */
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output is the same length as the input; unmapped entries become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: only entries with a local value are emitted (or counted when idxout is NULL) */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting pass: same filters as above, no stores */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
730 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
/*@C
    ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* NOTE(review): the output pointers nproc/procs/numprocs/indices are not
     validated here; all four are dereferenced unconditionally below — confirm
     whether NULL outputs should be supported */
  if (mapping->info_cached) {
    /* hand back the cached arrays directly; callers do not own these pointers */
    *nproc = mapping->info_nproc;
    *procs = mapping->info_procs;
    *numprocs = mapping->info_numprocs;
    *indices = mapping->info_indices;
  } else {
    /* first call: compute the neighbor info (collective communication inside) */
    ierr = ISLocalToGlobalMappingGetBlockInfo_Private(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
779 
780 #undef __FUNCT__
781 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo_Private"
782 static PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo_Private(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
783 {
784   PetscErrorCode ierr;
785   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
786   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
787   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
788   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
789   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
790   PetscInt       first_procs,first_numprocs,*first_indices;
791   MPI_Request    *recv_waits,*send_waits;
792   MPI_Status     recv_status,*send_status,*recv_statuses;
793   MPI_Comm       comm;
794   PetscBool      debug = PETSC_FALSE;
795 
796   PetscFunctionBegin;
797   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
798   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
799   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
800   if (size == 1) {
801     *nproc         = 0;
802     *procs         = NULL;
803     ierr           = PetscNew(numprocs);CHKERRQ(ierr);
804     (*numprocs)[0] = 0;
805     ierr           = PetscNew(indices);CHKERRQ(ierr);
806     (*indices)[0]  = NULL;
807     /* save info for reuse */
808     mapping->info_nproc = *nproc;
809     mapping->info_procs = *procs;
810     mapping->info_numprocs = *numprocs;
811     mapping->info_indices = *indices;
812     mapping->info_cached = PETSC_TRUE;
813     PetscFunctionReturn(0);
814   }
815 
816   ierr = PetscOptionsGetBool(((PetscObject)mapping)->options,NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
817 
818   /*
819     Notes on ISLocalToGlobalMappingGetBlockInfo
820 
821     globally owned node - the nodes that have been assigned to this processor in global
822            numbering, just for this routine.
823 
824     nontrivial globally owned node - node assigned to this processor that is on a subdomain
825            boundary (i.e. is has more than one local owner)
826 
827     locally owned node - node that exists on this processors subdomain
828 
829     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
830            local subdomain
831   */
832   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
833   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
834   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
835 
836   for (i=0; i<n; i++) {
837     if (lindices[i] > max) max = lindices[i];
838   }
839   ierr   = MPIU_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
840   Ng++;
841   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
842   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
843   scale  = Ng/size + 1;
844   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
845   rstart = scale*rank;
846 
847   /* determine ownership ranges of global indices */
848   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
849   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
850 
851   /* determine owners of each local node  */
852   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
853   for (i=0; i<n; i++) {
854     proc             = lindices[i]/scale; /* processor that globally owns this index */
855     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
856     owner[i]         = proc;
857     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
858   }
859   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
860   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
861 
862   /* inform other processors of number of messages and max length*/
863   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
864   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
865 
866   /* post receives for owned rows */
867   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
868   ierr = PetscMalloc1(nrecvs+1,&recv_waits);CHKERRQ(ierr);
869   for (i=0; i<nrecvs; i++) {
870     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
871   }
872 
873   /* pack messages containing lists of local nodes to owners */
874   ierr      = PetscMalloc1(2*n+1,&sends);CHKERRQ(ierr);
875   ierr      = PetscMalloc1(size+1,&starts);CHKERRQ(ierr);
876   starts[0] = 0;
877   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
878   for (i=0; i<n; i++) {
879     sends[starts[owner[i]]++] = lindices[i];
880     sends[starts[owner[i]]++] = i;
881   }
882   ierr = PetscFree(owner);CHKERRQ(ierr);
883   starts[0] = 0;
884   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
885 
886   /* send the messages */
887   ierr = PetscMalloc1(nsends+1,&send_waits);CHKERRQ(ierr);
888   ierr = PetscMalloc1(nsends+1,&dest);CHKERRQ(ierr);
889   cnt = 0;
890   for (i=0; i<size; i++) {
891     if (nprocs[2*i]) {
892       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
893       dest[cnt] = i;
894       cnt++;
895     }
896   }
897   ierr = PetscFree(starts);CHKERRQ(ierr);
898 
899   /* wait on receives */
900   ierr = PetscMalloc1(nrecvs+1,&source);CHKERRQ(ierr);
901   ierr = PetscMalloc1(nrecvs+1,&len);CHKERRQ(ierr);
902   cnt  = nrecvs;
903   ierr = PetscMalloc1(ng+1,&nownedsenders);CHKERRQ(ierr);
904   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
905   while (cnt) {
906     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
907     /* unpack receives into our local space */
908     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
909     source[imdex] = recv_status.MPI_SOURCE;
910     len[imdex]    = len[imdex]/2;
911     /* count how many local owners for each of my global owned indices */
912     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
913     cnt--;
914   }
915   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
916 
917   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
918   nowned  = 0;
919   nownedm = 0;
920   for (i=0; i<ng; i++) {
921     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
922   }
923 
924   /* create single array to contain rank of all local owners of each globally owned index */
925   ierr      = PetscMalloc1(nownedm+1,&ownedsenders);CHKERRQ(ierr);
926   ierr      = PetscMalloc1(ng+1,&starts);CHKERRQ(ierr);
927   starts[0] = 0;
928   for (i=1; i<ng; i++) {
929     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
930     else starts[i] = starts[i-1];
931   }
932 
933   /* for each nontrival globally owned node list all arriving processors */
934   for (i=0; i<nrecvs; i++) {
935     for (j=0; j<len[i]; j++) {
936       node = recvs[2*i*nmax+2*j]-rstart;
937       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
938     }
939   }
940 
941   if (debug) { /* -----------------------------------  */
942     starts[0] = 0;
943     for (i=1; i<ng; i++) {
944       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
945       else starts[i] = starts[i-1];
946     }
947     for (i=0; i<ng; i++) {
948       if (nownedsenders[i] > 1) {
949         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
950         for (j=0; j<nownedsenders[i]; j++) {
951           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
952         }
953         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
954       }
955     }
956     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
957   } /* -----------------------------------  */
958 
959   /* wait on original sends */
960   if (nsends) {
961     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
962     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
963     ierr = PetscFree(send_status);CHKERRQ(ierr);
964   }
965   ierr = PetscFree(send_waits);CHKERRQ(ierr);
966   ierr = PetscFree(sends);CHKERRQ(ierr);
967   ierr = PetscFree(nprocs);CHKERRQ(ierr);
968 
969   /* pack messages to send back to local owners */
970   starts[0] = 0;
971   for (i=1; i<ng; i++) {
972     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
973     else starts[i] = starts[i-1];
974   }
975   nsends2 = nrecvs;
976   ierr    = PetscMalloc1(nsends2+1,&nprocs);CHKERRQ(ierr); /* length of each message */
977   for (i=0; i<nrecvs; i++) {
978     nprocs[i] = 1;
979     for (j=0; j<len[i]; j++) {
980       node = recvs[2*i*nmax+2*j]-rstart;
981       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
982     }
983   }
984   nt = 0;
985   for (i=0; i<nsends2; i++) nt += nprocs[i];
986 
987   ierr = PetscMalloc1(nt+1,&sends2);CHKERRQ(ierr);
988   ierr = PetscMalloc1(nsends2+1,&starts2);CHKERRQ(ierr);
989 
990   starts2[0] = 0;
991   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
992   /*
993      Each message is 1 + nprocs[i] long, and consists of
994        (0) the number of nodes being sent back
995        (1) the local node number,
996        (2) the number of processors sharing it,
997        (3) the processors sharing it
998   */
999   for (i=0; i<nsends2; i++) {
1000     cnt = 1;
1001     sends2[starts2[i]] = 0;
1002     for (j=0; j<len[i]; j++) {
1003       node = recvs[2*i*nmax+2*j]-rstart;
1004       if (nownedsenders[node] > 1) {
1005         sends2[starts2[i]]++;
1006         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
1007         sends2[starts2[i]+cnt++] = nownedsenders[node];
1008         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
1009         cnt += nownedsenders[node];
1010       }
1011     }
1012   }
1013 
1014   /* receive the message lengths */
1015   nrecvs2 = nsends;
1016   ierr    = PetscMalloc1(nrecvs2+1,&lens2);CHKERRQ(ierr);
1017   ierr    = PetscMalloc1(nrecvs2+1,&starts3);CHKERRQ(ierr);
1018   ierr    = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
1019   for (i=0; i<nrecvs2; i++) {
1020     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
1021   }
1022 
1023   /* send the message lengths */
1024   for (i=0; i<nsends2; i++) {
1025     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
1026   }
1027 
1028   /* wait on receives of lens */
1029   if (nrecvs2) {
1030     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
1031     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
1032     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
1033   }
1034   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1035 
1036   starts3[0] = 0;
1037   nt         = 0;
1038   for (i=0; i<nrecvs2-1; i++) {
1039     starts3[i+1] = starts3[i] + lens2[i];
1040     nt          += lens2[i];
1041   }
1042   if (nrecvs2) nt += lens2[nrecvs2-1];
1043 
1044   ierr = PetscMalloc1(nt+1,&recvs2);CHKERRQ(ierr);
1045   ierr = PetscMalloc1(nrecvs2+1,&recv_waits);CHKERRQ(ierr);
1046   for (i=0; i<nrecvs2; i++) {
1047     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
1048   }
1049 
1050   /* send the messages */
1051   ierr = PetscMalloc1(nsends2+1,&send_waits);CHKERRQ(ierr);
1052   for (i=0; i<nsends2; i++) {
1053     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
1054   }
1055 
1056   /* wait on receives */
1057   if (nrecvs2) {
1058     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
1059     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
1060     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
1061   }
1062   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
1063   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1064 
1065   if (debug) { /* -----------------------------------  */
1066     cnt = 0;
1067     for (i=0; i<nrecvs2; i++) {
1068       nt = recvs2[cnt++];
1069       for (j=0; j<nt; j++) {
1070         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
1071         for (k=0; k<recvs2[cnt+1]; k++) {
1072           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
1073         }
1074         cnt += 2 + recvs2[cnt+1];
1075         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1076       }
1077     }
1078     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1079   } /* -----------------------------------  */
1080 
1081   /* count number subdomains for each local node */
1082   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1083   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1084   cnt  = 0;
1085   for (i=0; i<nrecvs2; i++) {
1086     nt = recvs2[cnt++];
1087     for (j=0; j<nt; j++) {
1088       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1089       cnt += 2 + recvs2[cnt+1];
1090     }
1091   }
1092   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1093   *nproc    = nt;
1094   ierr = PetscMalloc1(nt+1,procs);CHKERRQ(ierr);
1095   ierr = PetscMalloc1(nt+1,numprocs);CHKERRQ(ierr);
1096   ierr = PetscMalloc1(nt+1,indices);CHKERRQ(ierr);
1097   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1098   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1099   cnt  = 0;
1100   for (i=0; i<size; i++) {
1101     if (nprocs[i] > 0) {
1102       bprocs[i]        = cnt;
1103       (*procs)[cnt]    = i;
1104       (*numprocs)[cnt] = nprocs[i];
1105       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1106       cnt++;
1107     }
1108   }
1109 
1110   /* make the list of subdomains for each nontrivial local node */
1111   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1112   cnt  = 0;
1113   for (i=0; i<nrecvs2; i++) {
1114     nt = recvs2[cnt++];
1115     for (j=0; j<nt; j++) {
1116       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1117       cnt += 2 + recvs2[cnt+1];
1118     }
1119   }
1120   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1121   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1122 
1123   /* sort the node indexing by their global numbers */
1124   nt = *nproc;
1125   for (i=0; i<nt; i++) {
1126     ierr = PetscMalloc1((*numprocs)[i],&tmp);CHKERRQ(ierr);
1127     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1128     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1129     ierr = PetscFree(tmp);CHKERRQ(ierr);
1130   }
1131 
1132   if (debug) { /* -----------------------------------  */
1133     nt = *nproc;
1134     for (i=0; i<nt; i++) {
1135       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1136       for (j=0; j<(*numprocs)[i]; j++) {
1137         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1138       }
1139       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1140     }
1141     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1142   } /* -----------------------------------  */
1143 
1144   /* wait on sends */
1145   if (nsends2) {
1146     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1147     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1148     ierr = PetscFree(send_status);CHKERRQ(ierr);
1149   }
1150 
1151   ierr = PetscFree(starts3);CHKERRQ(ierr);
1152   ierr = PetscFree(dest);CHKERRQ(ierr);
1153   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1154 
1155   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1156   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1157   ierr = PetscFree(starts);CHKERRQ(ierr);
1158   ierr = PetscFree(starts2);CHKERRQ(ierr);
1159   ierr = PetscFree(lens2);CHKERRQ(ierr);
1160 
1161   ierr = PetscFree(source);CHKERRQ(ierr);
1162   ierr = PetscFree(len);CHKERRQ(ierr);
1163   ierr = PetscFree(recvs);CHKERRQ(ierr);
1164   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1165   ierr = PetscFree(sends2);CHKERRQ(ierr);
1166 
1167   /* put the information about myself as the first entry in the list */
1168   first_procs    = (*procs)[0];
1169   first_numprocs = (*numprocs)[0];
1170   first_indices  = (*indices)[0];
1171   for (i=0; i<*nproc; i++) {
1172     if ((*procs)[i] == rank) {
1173       (*procs)[0]    = (*procs)[i];
1174       (*numprocs)[0] = (*numprocs)[i];
1175       (*indices)[0]  = (*indices)[i];
1176       (*procs)[i]    = first_procs;
1177       (*numprocs)[i] = first_numprocs;
1178       (*indices)[i]  = first_indices;
1179       break;
1180     }
1181   }
1182 
1183   /* save info for reuse */
1184   mapping->info_nproc = *nproc;
1185   mapping->info_procs = *procs;
1186   mapping->info_numprocs = *numprocs;
1187   mapping->info_indices = *indices;
1188   mapping->info_cached = PETSC_TRUE;
1189   PetscFunctionReturn(0);
1190 }
1191 
1192 #undef __FUNCT__
1193 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1194 /*@C
1195     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1196 
1197     Collective on ISLocalToGlobalMapping
1198 
1199     Input Parameters:
1200 .   mapping - the mapping from local to global indexing
1201 
1202     Output Parameter:
1203 +   nproc - number of processors that are connected to this one
1204 .   proc - neighboring processors
1205 .   numproc - number of indices for each processor
1206 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1207 
1208     Level: advanced
1209 
1210 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1211           ISLocalToGlobalMappingGetInfo()
1212 @*/
1213 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1214 {
1215   PetscErrorCode ierr;
1216 
1217   PetscFunctionBegin;
1218   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1219   if (mapping->info_free) {
1220     ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1221     if (*indices) {
1222       PetscInt i;
1223 
1224       ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1225       for (i=1; i<*nproc; i++) {
1226         ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1227       }
1228       ierr = PetscFree(*indices);CHKERRQ(ierr);
1229     }
1230   }
1231   *nproc = 0;
1232   *procs = NULL;
1233   *numprocs = NULL;
1234   *indices = NULL;
1235   PetscFunctionReturn(0);
1236 }
1237 
1238 #undef __FUNCT__
1239 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
1240 /*@C
1241     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
1242      each index shared by more than one processor
1243 
1244     Collective on ISLocalToGlobalMapping
1245 
1246     Input Parameters:
1247 .   mapping - the mapping from local to global indexing
1248 
1249     Output Parameter:
1250 +   nproc - number of processors that are connected to this one
1251 .   proc - neighboring processors
1252 .   numproc - number of indices for each subdomain (processor)
1253 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
1254 
1255     Level: advanced
1256 
1257     Concepts: mapping^local to global
1258 
1259     Fortran Usage:
1260 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
1261 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
1262           PetscInt indices[nproc][numprocmax],ierr)
1263         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
1264         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
1265 
1266 
1267 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1268           ISLocalToGlobalMappingRestoreInfo()
1269 @*/
1270 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1271 {
1272   PetscErrorCode ierr;
1273   PetscInt       **bindices = NULL,*bnumprocs = NULL,bs = mapping->bs,i,j,k;
1274 
1275   PetscFunctionBegin;
1276   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
1277   ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,&bnumprocs,&bindices);CHKERRQ(ierr);
1278   if (bs > 1) { /* we need to expand the cached info */
1279     ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
1280     ierr = PetscCalloc1(*nproc,&*numprocs);CHKERRQ(ierr);
1281     for (i=0; i<*nproc; i++) {
1282       ierr = PetscMalloc1(bs*bnumprocs[i],&(*indices)[i]);CHKERRQ(ierr);
1283       for (j=0; j<bnumprocs[i]; j++) {
1284         for (k=0; k<bs; k++) {
1285           (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
1286         }
1287       }
1288       (*numprocs)[i] = bnumprocs[i]*bs;
1289     }
1290     mapping->info_free = PETSC_TRUE;
1291   } else {
1292     *numprocs = bnumprocs;
1293     *indices  = bindices;
1294   }
1295   PetscFunctionReturn(0);
1296 }
1297 
1298 #undef __FUNCT__
1299 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
1300 /*@C
1301     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
1302 
1303     Collective on ISLocalToGlobalMapping
1304 
1305     Input Parameters:
1306 .   mapping - the mapping from local to global indexing
1307 
1308     Output Parameter:
1309 +   nproc - number of processors that are connected to this one
1310 .   proc - neighboring processors
1311 .   numproc - number of indices for each processor
1312 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1313 
1314     Level: advanced
1315 
1316 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1317           ISLocalToGlobalMappingGetInfo()
1318 @*/
1319 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1320 {
1321   PetscErrorCode ierr;
1322 
1323   PetscFunctionBegin;
1324   ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
1325   PetscFunctionReturn(0);
1326 }
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1330 /*@C
1331    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1332 
1333    Not Collective
1334 
1335    Input Arguments:
1336 . ltog - local to global mapping
1337 
1338    Output Arguments:
1339 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1340 
1341    Level: advanced
1342 
1343    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1344 
1345 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1346 @*/
1347 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1348 {
1349   PetscFunctionBegin;
1350   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1351   PetscValidPointer(array,2);
1352   if (ltog->bs == 1) {
1353     *array = ltog->indices;
1354   } else {
1355     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1356     const PetscInt *ii;
1357     PetscErrorCode ierr;
1358 
1359     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1360     *array = jj;
1361     k    = 0;
1362     ii   = ltog->indices;
1363     for (i=0; i<n; i++)
1364       for (j=0; j<bs; j++)
1365         jj[k++] = bs*ii[i] + j;
1366   }
1367   PetscFunctionReturn(0);
1368 }
1369 
1370 #undef __FUNCT__
1371 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1372 /*@C
1373    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1374 
1375    Not Collective
1376 
1377    Input Arguments:
1378 + ltog - local to global mapping
1379 - array - array of indices
1380 
1381    Level: advanced
1382 
1383 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1384 @*/
1385 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1386 {
1387   PetscFunctionBegin;
1388   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1389   PetscValidPointer(array,2);
1390   if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1391 
1392   if (ltog->bs > 1) {
1393     PetscErrorCode ierr;
1394     ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
1395   }
1396   PetscFunctionReturn(0);
1397 }
1398 
1399 #undef __FUNCT__
1400 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
1401 /*@C
1402    ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block
1403 
1404    Not Collective
1405 
1406    Input Arguments:
1407 . ltog - local to global mapping
1408 
1409    Output Arguments:
1410 . array - array of indices
1411 
1412    Level: advanced
1413 
1414 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
1415 @*/
1416 PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1417 {
1418   PetscFunctionBegin;
1419   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1420   PetscValidPointer(array,2);
1421   *array = ltog->indices;
1422   PetscFunctionReturn(0);
1423 }
1424 
1425 #undef __FUNCT__
1426 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
1427 /*@C
1428    ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()
1429 
1430    Not Collective
1431 
1432    Input Arguments:
1433 + ltog - local to global mapping
1434 - array - array of indices
1435 
1436    Level: advanced
1437 
1438 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1439 @*/
1440 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1441 {
1442   PetscFunctionBegin;
1443   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1444   PetscValidPointer(array,2);
1445   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1446   *array = NULL;
1447   PetscFunctionReturn(0);
1448 }
1449 
1450 #undef __FUNCT__
1451 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1452 /*@C
1453    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1454 
1455    Not Collective
1456 
1457    Input Arguments:
1458 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1459 . n - number of mappings to concatenate
1460 - ltogs - local to global mappings
1461 
1462    Output Arguments:
1463 . ltogcat - new mapping
1464 
1465    Note: this currently always returns a mapping with block size of 1
1466 
1467    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1468 
1469    Level: advanced
1470 
1471 .seealso: ISLocalToGlobalMappingCreate()
1472 @*/
1473 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1474 {
1475   PetscInt       i,cnt,m,*idx;
1476   PetscErrorCode ierr;
1477 
1478   PetscFunctionBegin;
1479   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1480   if (n > 0) PetscValidPointer(ltogs,3);
1481   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1482   PetscValidPointer(ltogcat,4);
1483   for (cnt=0,i=0; i<n; i++) {
1484     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1485     cnt += m;
1486   }
1487   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1488   for (cnt=0,i=0; i<n; i++) {
1489     const PetscInt *subidx;
1490     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1491     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1492     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1493     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1494     cnt += m;
1495   }
1496   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 
1501