xref: /petsc/src/vec/is/utils/isltog.c (revision 2e826d6f168293946e7dc17f87075462a7cd4791)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 #include <petscsf.h>
4 #include <petscviewer.h>
5 
6 PetscClassId IS_LTOGM_CLASSID;
7 
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "ISG2LMapApply"
11 PetscErrorCode ISG2LMapApply(ISLocalToGlobalMapping mapping,PetscInt n,const PetscInt in[],PetscInt out[])
12 {
13   PetscErrorCode ierr;
14   PetscInt       i,start,end;
15 
16   PetscFunctionBegin;
17   if (!mapping->globals) {
18     ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,0,0,0,0);CHKERRQ(ierr);
19   }
20   start = mapping->globalstart;
21   end = mapping->globalend;
22   for (i=0; i<n; i++) {
23     if (in[i] < 0)          out[i] = in[i];
24     else if (in[i] < start) out[i] = -1;
25     else if (in[i] > end)   out[i] = -1;
26     else                    out[i] = mapping->globals[in[i] - start];
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
34 /*@
35     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping
36 
37     Not Collective
38 
39     Input Parameter:
40 .   ltog - local to global mapping
41 
42     Output Parameter:
43 .   n - the number of entries in the local mapping, ISLocalToGlobalMappingGetIndices() returns an array of this length
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
52 {
53   PetscFunctionBegin;
54   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
55   PetscValidIntPointer(n,2);
56   *n = mapping->bs*mapping->n;
57   PetscFunctionReturn(0);
58 }
59 
60 #undef __FUNCT__
61 #define __FUNCT__ "ISLocalToGlobalMappingView"
62 /*@C
63     ISLocalToGlobalMappingView - View a local to global mapping
64 
65     Not Collective
66 
67     Input Parameters:
68 +   ltog - local to global mapping
69 -   viewer - viewer
70 
71     Level: advanced
72 
73     Concepts: mapping^local to global
74 
75 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
76 @*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt       i;
  PetscMPIInt    rank;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* default to stdout on the mapping's communicator when no viewer is supplied */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mapping),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)mapping),&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)mapping,viewer);CHKERRQ(ierr);
    /* synchronized printing so each rank's "[rank] local global" lines come out in rank order */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D %D\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    /* flush while synchronized printing is still enabled, then disable it again */
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
107 /*@
108     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
109     ordering and a global parallel ordering.
110 
111     Not collective
112 
113     Input Parameter:
114 .   is - index set containing the global numbers for each local number
115 
116     Output Parameter:
117 .   mapping - new mapping data structure
118 
119     Notes: the block size of the IS determines the block size of the mapping
120     Level: advanced
121 
122     Concepts: mapping^local to global
123 
124 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
125 @*/
126 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
127 {
128   PetscErrorCode ierr;
129   PetscInt       n,bs;
130   const PetscInt *indices;
131   MPI_Comm       comm;
132   PetscBool      isblock;
133 
134   PetscFunctionBegin;
135   PetscValidHeaderSpecific(is,IS_CLASSID,1);
136   PetscValidPointer(mapping,2);
137 
138   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
139   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
140   ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
141   if (!isblock) {
142     ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
143     ierr = ISLocalToGlobalMappingCreate(comm,1,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
144     ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
145   } else {
146     ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
147     ierr = ISBlockGetIndices(is,&indices);CHKERRQ(ierr);
148     ierr = ISLocalToGlobalMappingCreate(comm,bs,n/bs,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
149     ierr = ISBlockRestoreIndices(is,&indices);CHKERRQ(ierr);
150   }
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
156 /*@C
157     ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Collective
161 
    Input Parameters:
163 +   sf - star forest mapping contiguous local indices to (rank, offset)
164 -   start - first global index on this process
165 
166     Output Parameter:
167 .   mapping - new mapping data structure
168 
169     Level: advanced
170 
171     Concepts: mapping^local to global
172 
173 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
174 @*/
175 PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
176 {
177   PetscErrorCode ierr;
178   PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
179   const PetscInt *ilocal;
180   MPI_Comm       comm;
181 
182   PetscFunctionBegin;
183   PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
184   PetscValidPointer(mapping,3);
185 
186   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
187   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
188   if (ilocal) {
189     for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);
190   }
191   else maxlocal = nleaves;
192   ierr = PetscMalloc1(nroots,&globals);CHKERRQ(ierr);
193   ierr = PetscMalloc1(maxlocal,&ltog);CHKERRQ(ierr);
194   for (i=0; i<nroots; i++) globals[i] = start + i;
195   for (i=0; i<maxlocal; i++) ltog[i] = -1;
196   ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
197   ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
198   ierr = ISLocalToGlobalMappingCreate(comm,1,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
199   ierr = PetscFree(globals);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockSize"
205 /*@
    ISLocalToGlobalMappingGetBlockSize - Gets the blocksize of the mapping
208 
209     Not Collective
210 
211     Input Parameters:
212 .   mapping - mapping data structure
213 
214     Output Parameter:
215 .   bs - the blocksize
216 
217     Level: advanced
218 
219     Concepts: mapping^local to global
220 
221 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
222 @*/
223 PetscErrorCode  ISLocalToGlobalMappingGetBlockSize(ISLocalToGlobalMapping mapping,PetscInt *bs)
224 {
225   PetscFunctionBegin;
226   *bs = mapping->bs;
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
232 /*@
233     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
234     ordering and a global parallel ordering.
235 
236     Not Collective, but communicator may have more than one process
237 
238     Input Parameters:
239 +   comm - MPI communicator
240 .   bs - the block size
241 .   n - the number of local elements divided by the block size, or equivalently the number of block indices
242 .   indices - the global index for each local element, these do not need to be in increasing order (sorted), these values should not be scaled (i.e. multiplied) by the blocksize bs
243 -   mode - see PetscCopyMode
244 
245     Output Parameter:
246 .   mapping - new mapping data structure
247 
248     Notes: There is one integer value in indices per block and it represents the actual indices bs*idx + j, where j=0,..,bs-1
249     Level: advanced
250 
251     Concepts: mapping^local to global
252 
253 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
254 @*/
255 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt bs,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
256 {
257   PetscErrorCode ierr;
258   PetscInt       *in;
259 
260   PetscFunctionBegin;
261   if (n) PetscValidIntPointer(indices,3);
262   PetscValidPointer(mapping,4);
263 
264   *mapping = NULL;
265   ierr = ISInitializePackage();CHKERRQ(ierr);
266 
267   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,"ISLocalToGlobalMapping","Local to global mapping","IS",
268                            cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
269   (*mapping)->n  = n;
270   (*mapping)->bs = bs;
271   /*
272     Do not create the global to local mapping. This is only created if
273     ISGlobalToLocalMapping() is called
274   */
275   (*mapping)->globals = 0;
276   if (mode == PETSC_COPY_VALUES) {
277     ierr = PetscMalloc1(n,&in);CHKERRQ(ierr);
278     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
279     (*mapping)->indices = in;
280     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
281   } else if (mode == PETSC_OWN_POINTER) {
282     (*mapping)->indices = (PetscInt*)indices;
283     ierr = PetscLogObjectMemory((PetscObject)*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
284   }
285   else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
286   PetscFunctionReturn(0);
287 }
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
291 /*@
292    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
293    ordering and a global parallel ordering.
294 
   Not Collective
296 
297    Input Parameters:
298 .  mapping - mapping data structure
299 
300    Level: advanced
301 
302 .seealso: ISLocalToGlobalMappingCreate()
303 @*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);   /* destroying a NULL mapping is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* drop one reference; only the last reference actually frees the object */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr     = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr     = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr     = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;   /* null the caller's pointer so it cannot be reused after destruction */
  PetscFunctionReturn(0);
}
318 
319 #undef __FUNCT__
320 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
321 /*@
322     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
323     a new index set using the global numbering defined in an ISLocalToGlobalMapping
324     context.
325 
326     Not collective
327 
328     Input Parameters:
329 +   mapping - mapping between local and global numbering
330 -   is - index set in local numbering
331 
332     Output Parameters:
333 .   newis - index set in global numbering
334 
335     Level: advanced
336 
337     Concepts: mapping^local to global
338 
339 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
340           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
341 @*/
342 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
343 {
344   PetscErrorCode ierr;
345   PetscInt       n,*idxout;
346   const PetscInt *idxin;
347 
348   PetscFunctionBegin;
349   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
350   PetscValidHeaderSpecific(is,IS_CLASSID,2);
351   PetscValidPointer(newis,3);
352 
353   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
354   ierr = ISGetIndices(is,&idxin);CHKERRQ(ierr);
355   ierr = PetscMalloc1(n,&idxout);CHKERRQ(ierr);
356   ierr = ISLocalToGlobalMappingApply(mapping,n,idxin,idxout);CHKERRQ(ierr);
357   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
358   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
359   PetscFunctionReturn(0);
360 }
361 
362 #undef __FUNCT__
363 #define __FUNCT__ "ISLocalToGlobalMappingApply"
364 /*@
365    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
366    and converts them to the global numbering.
367 
368    Not collective
369 
370    Input Parameters:
371 +  mapping - the local to global mapping context
372 .  N - number of integers
373 -  in - input indices in local numbering
374 
375    Output Parameter:
376 .  out - indices in global numbering
377 
378    Notes:
379    The in and out array parameters may be identical.
380 
381    Level: advanced
382 
383 .seealso: ISLocalToGlobalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
384           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
385           AOPetscToApplication(), ISGlobalToLocalMappingApply()
386 
387     Concepts: mapping^local to global
388 @*/
389 PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
390 {
391   PetscInt       i,bs = mapping->bs,Nmax = bs*mapping->n;
392   const PetscInt *idx = mapping->indices;
393 
394   PetscFunctionBegin;
395   if (bs == 1) {
396     for (i=0; i<N; i++) {
397       if (in[i] < 0) {
398         out[i] = in[i];
399         continue;
400       }
401       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
402       out[i] = idx[in[i]];
403     }
404   } else {
405     for (i=0; i<N; i++) {
406       if (in[i] < 0) {
407         out[i] = in[i];
408         continue;
409       }
410       if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %D too large %D (max) at %D",in[i],Nmax-1,i);
411       out[i] = idx[in[i]/bs]*bs + (in[i] % bs);
412     }
413   }
414   PetscFunctionReturn(0);
415 }
416 
417 #undef __FUNCT__
418 #define __FUNCT__ "ISLocalToGlobalMappingApplyBlock"
419 /*@
420    ISLocalToGlobalMappingApplyBlock - Takes a list of integers in a local block numbering  and converts them to the global block numbering
421 
422    Not collective
423 
424    Input Parameters:
425 +  mapping - the local to global mapping context
426 .  N - number of integers
427 -  in - input indices in local block numbering
428 
429    Output Parameter:
430 .  out - indices in global block numbering
431 
432    Notes:
433    The in and out array parameters may be identical.
434 
435    Example:
436      If the index values are {0,1,6,7} set with a call to ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,2,2,{0,3}) then the mapping applied to 0
437      (the first block) would produce 0 and the mapping applied to 1 (the second block) would produce 3.
438 
439    Level: advanced
440 
441 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
442           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
443           AOPetscToApplication(), ISGlobalToLocalMappingApply()
444 
445     Concepts: mapping^local to global
446 @*/
447 PetscErrorCode ISLocalToGlobalMappingApplyBlock(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
448 {
449   PetscInt       i,Nmax = mapping->n;
450   const PetscInt *idx = mapping->indices;
451 
452   PetscFunctionBegin;
453   for (i=0; i<N; i++) {
454     if (in[i] < 0) {
455       out[i] = in[i];
456       continue;
457     }
458     if (in[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local block index %D too large %D (max) at %D",in[i],Nmax-1,i);
459     out[i] = idx[in[i]];
460   }
461   PetscFunctionReturn(0);
462 }
463 
464 /* -----------------------------------------------------------------------------------------*/
465 
466 #undef __FUNCT__
467 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
468 /*
469     Creates the global fields in the ISLocalToGlobalMapping structure
470 */
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  /* find the inclusive [start,end] range of the nonnegative global indices */
  end   = 0;
  start = PETSC_MAX_INT;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;   /* negative entries mean "no global index"; skip them */
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}   /* no valid entries at all: empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* dense lookup table over the range: globals[g-start] = local index of global g, or -1 */
  ierr             = PetscMalloc1((end-start+2),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) globals[i] = -1;
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;   /* duplicate globals: the last local occurrence wins */
  }

  /* NOTE: memory use is O(end-start), i.e. proportional to the global index span */
  ierr = PetscLogObjectMemory((PetscObject)mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
500 
501 #undef __FUNCT__
502 #define __FUNCT__ "ISGlobalToLocalMappingApply"
503 /*@
504     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
505     specified with a global numbering.
506 
507     Not collective
508 
509     Input Parameters:
510 +   mapping - mapping between local and global numbering
511 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
512            IS_GTOLM_DROP - drops the indices with no local value from the output list
513 .   n - number of global indices to map
514 -   idx - global indices to map
515 
516     Output Parameters:
517 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
518 -   idxout - local index of each global index, one must pass in an array long enough
519              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
520              idxout == NULL to determine the required length (returned in nout)
521              and then allocate the required space and call ISGlobalToLocalMappingApply()
522              a second time to set the values.
523 
524     Notes:
525     Either nout or idxout may be NULL. idx and idxout may be identical.
526 
527     This is not scalable in memory usage. Each processor requires O(Nglobal) size
528     array to compute these.
529 
530     Level: advanced
531 
532     Developer Note: The manual page states that idx and idxout may be identical but the calling
533        sequence declares idx as const so it cannot be the same as idxout.
534 
535     Concepts: mapping^global to local
536 
537 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApplyBlock(), ISLocalToGlobalMappingCreate(),
538           ISLocalToGlobalMappingDestroy()
539 @*/
540 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
541                                             PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
542 {
543   PetscInt       i,*globals,nf = 0,tmp,start,end,bs;
544   PetscErrorCode ierr;
545 
546   PetscFunctionBegin;
547   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
548   if (!mapping->globals) {
549     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
550   }
551   globals = mapping->globals;
552   start   = mapping->globalstart;
553   end     = mapping->globalend;
554   bs      = mapping->bs;
555 
556   if (type == IS_GTOLM_MASK) {
557     if (idxout) {
558       for (i=0; i<n; i++) {
559         if (idx[i] < 0) idxout[i] = idx[i];
560         else if (idx[i] < bs*start) idxout[i] = -1;
561         else if (idx[i] > bs*end)   idxout[i] = -1;
562         else                        idxout[i] = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
563       }
564     }
565     if (nout) *nout = n;
566   } else {
567     if (idxout) {
568       for (i=0; i<n; i++) {
569         if (idx[i] < 0) continue;
570         if (idx[i] < bs*start) continue;
571         if (idx[i] > bs*end) continue;
572         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
573         if (tmp < 0) continue;
574         idxout[nf++] = tmp;
575       }
576     } else {
577       for (i=0; i<n; i++) {
578         if (idx[i] < 0) continue;
579         if (idx[i] < bs*start) continue;
580         if (idx[i] > bs*end) continue;
581         tmp = bs*globals[idx[i]/bs - start] + (idx[i] % bs);
582         if (tmp < 0) continue;
583         nf++;
584       }
585     }
586     if (nout) *nout = nf;
587   }
588   PetscFunctionReturn(0);
589 }
590 
591 #undef __FUNCT__
592 #define __FUNCT__ "ISGlobalToLocalMappingApplyBlock"
593 /*@
594     ISGlobalToLocalMappingApplyBlock - Provides the local block numbering for a list of integers
595     specified with a block global numbering.
596 
597     Not collective
598 
599     Input Parameters:
600 +   mapping - mapping between local and global numbering
601 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
602            IS_GTOLM_DROP - drops the indices with no local value from the output list
603 .   n - number of global indices to map
604 -   idx - global indices to map
605 
606     Output Parameters:
607 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
608 -   idxout - local index of each global index, one must pass in an array long enough
609              to hold all the indices. You can call ISGlobalToLocalMappingApplyBlock() with
610              idxout == NULL to determine the required length (returned in nout)
611              and then allocate the required space and call ISGlobalToLocalMappingApplyBlock()
612              a second time to set the values.
613 
614     Notes:
615     Either nout or idxout may be NULL. idx and idxout may be identical.
616 
617     This is not scalable in memory usage. Each processor requires O(Nglobal) size
618     array to compute these.
619 
620     Level: advanced
621 
622     Developer Note: The manual page states that idx and idxout may be identical but the calling
623        sequence declares idx as const so it cannot be the same as idxout.
624 
625     Concepts: mapping^global to local
626 
627 .seealso: ISLocalToGlobalMappingApply(), ISGlobalToLocalMappingApply(), ISLocalToGlobalMappingCreate(),
628           ISLocalToGlobalMappingDestroy()
629 @*/
PetscErrorCode  ISGlobalToLocalMappingApplyBlock(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;  /* first owned global block index */
  end     = mapping->globalend;    /* last owned global block index (inclusive) */

  if (type == IS_GTOLM_MASK) {
    /* masked: out-of-range entries become -1, negative entries pass through; nout == n */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* drop: entries with no local value are removed; nout reports the surviving count */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;   /* inside the range but not locally owned */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == NULL) so the caller can size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }
  PetscFunctionReturn(0);
}
679 
680 #undef __FUNCT__
681 #define __FUNCT__ "ISLocalToGlobalMappingGetBlockInfo"
682 /*@C
683     ISLocalToGlobalMappingGetBlockInfo - Gets the neighbor information for each processor and
684      each index shared by more than one processor
685 
686     Collective on ISLocalToGlobalMapping
687 
688     Input Parameters:
689 .   mapping - the mapping from local to global indexing
690 
691     Output Parameter:
692 +   nproc - number of processors that are connected to this one
693 .   proc - neighboring processors
694 .   numproc - number of indices for each subdomain (processor)
695 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
696 
697     Level: advanced
698 
699     Concepts: mapping^local to global
700 
701     Fortran Usage:
702 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
703 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
704           PetscInt indices[nproc][numprocmax],ierr)
705         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
706         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
707 
708 
709 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
710           ISLocalToGlobalMappingRestoreInfo()
711 @*/
712 PetscErrorCode  ISLocalToGlobalMappingGetBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
713 {
714   PetscErrorCode ierr;
715   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
716   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
717   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
718   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
719   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
720   PetscInt       first_procs,first_numprocs,*first_indices;
721   MPI_Request    *recv_waits,*send_waits;
722   MPI_Status     recv_status,*send_status,*recv_statuses;
723   MPI_Comm       comm;
724   PetscBool      debug = PETSC_FALSE;
725 
726   PetscFunctionBegin;
727   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
728   ierr = PetscObjectGetComm((PetscObject)mapping,&comm);CHKERRQ(ierr);
729   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
730   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
731   if (size == 1) {
732     *nproc         = 0;
733     *procs         = NULL;
734     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
735     (*numprocs)[0] = 0;
736     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
737     (*indices)[0]  = NULL;
738     PetscFunctionReturn(0);
739   }
740 
741   ierr = PetscOptionsGetBool(NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,NULL);CHKERRQ(ierr);
742 
743   /*
744     Notes on ISLocalToGlobalMappingGetBlockInfo
745 
746     globally owned node - the nodes that have been assigned to this processor in global
747            numbering, just for this routine.
748 
749     nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)
751 
752     locally owned node - node that exists on this processors subdomain
753 
754     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
755            local subdomain
756   */
757   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
758   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
759   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
760 
761   for (i=0; i<n; i++) {
762     if (lindices[i] > max) max = lindices[i];
763   }
764   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
765   Ng++;
766   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
767   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
768   scale  = Ng/size + 1;
769   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
770   rstart = scale*rank;
771 
772   /* determine ownership ranges of global indices */
773   ierr = PetscMalloc1(2*size,&nprocs);CHKERRQ(ierr);
774   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
775 
776   /* determine owners of each local node  */
777   ierr = PetscMalloc1(n,&owner);CHKERRQ(ierr);
778   for (i=0; i<n; i++) {
779     proc             = lindices[i]/scale; /* processor that globally owns this index */
780     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
781     owner[i]         = proc;
782     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
783   }
784   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
785   ierr = PetscInfo1(mapping,"Number of global owners for my local data %D\n",nsends);CHKERRQ(ierr);
786 
787   /* inform other processors of number of messages and max length*/
788   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
789   ierr = PetscInfo1(mapping,"Number of local owners for my global data %D\n",nrecvs);CHKERRQ(ierr);
790 
791   /* post receives for owned rows */
792   ierr = PetscMalloc1((2*nrecvs+1)*(nmax+1),&recvs);CHKERRQ(ierr);
793   ierr = PetscMalloc1((nrecvs+1),&recv_waits);CHKERRQ(ierr);
794   for (i=0; i<nrecvs; i++) {
795     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
796   }
797 
798   /* pack messages containing lists of local nodes to owners */
799   ierr      = PetscMalloc1((2*n+1),&sends);CHKERRQ(ierr);
800   ierr      = PetscMalloc1((size+1),&starts);CHKERRQ(ierr);
801   starts[0] = 0;
802   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
803   for (i=0; i<n; i++) {
804     sends[starts[owner[i]]++] = lindices[i];
805     sends[starts[owner[i]]++] = i;
806   }
807   ierr = PetscFree(owner);CHKERRQ(ierr);
808   starts[0] = 0;
809   for (i=1; i<size; i++) starts[i] = starts[i-1] + 2*nprocs[2*i-2];
810 
811   /* send the messages */
812   ierr = PetscMalloc1((nsends+1),&send_waits);CHKERRQ(ierr);
813   ierr = PetscMalloc1((nsends+1),&dest);CHKERRQ(ierr);
814   cnt = 0;
815   for (i=0; i<size; i++) {
816     if (nprocs[2*i]) {
817       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
818       dest[cnt] = i;
819       cnt++;
820     }
821   }
822   ierr = PetscFree(starts);CHKERRQ(ierr);
823 
824   /* wait on receives */
825   ierr = PetscMalloc1((nrecvs+1),&source);CHKERRQ(ierr);
826   ierr = PetscMalloc1((nrecvs+1),&len);CHKERRQ(ierr);
827   cnt  = nrecvs;
828   ierr = PetscMalloc1((ng+1),&nownedsenders);CHKERRQ(ierr);
829   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
830   while (cnt) {
831     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
832     /* unpack receives into our local space */
833     ierr          = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
834     source[imdex] = recv_status.MPI_SOURCE;
835     len[imdex]    = len[imdex]/2;
836     /* count how many local owners for each of my global owned indices */
837     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
838     cnt--;
839   }
840   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
841 
842   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
843   nowned  = 0;
844   nownedm = 0;
845   for (i=0; i<ng; i++) {
846     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
847   }
848 
849   /* create single array to contain rank of all local owners of each globally owned index */
850   ierr      = PetscMalloc1((nownedm+1),&ownedsenders);CHKERRQ(ierr);
851   ierr      = PetscMalloc1((ng+1),&starts);CHKERRQ(ierr);
852   starts[0] = 0;
853   for (i=1; i<ng; i++) {
854     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
855     else starts[i] = starts[i-1];
856   }
857 
858   /* for each nontrival globally owned node list all arriving processors */
859   for (i=0; i<nrecvs; i++) {
860     for (j=0; j<len[i]; j++) {
861       node = recvs[2*i*nmax+2*j]-rstart;
862       if (nownedsenders[node] > 1) ownedsenders[starts[node]++] = source[i];
863     }
864   }
865 
866   if (debug) { /* -----------------------------------  */
867     starts[0] = 0;
868     for (i=1; i<ng; i++) {
869       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
870       else starts[i] = starts[i-1];
871     }
872     for (i=0; i<ng; i++) {
873       if (nownedsenders[i] > 1) {
874         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %D local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
875         for (j=0; j<nownedsenders[i]; j++) {
876           ierr = PetscSynchronizedPrintf(comm,"%D ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
877         }
878         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
879       }
880     }
881     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
882   } /* -----------------------------------  */
883 
884   /* wait on original sends */
885   if (nsends) {
886     ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
887     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
888     ierr = PetscFree(send_status);CHKERRQ(ierr);
889   }
890   ierr = PetscFree(send_waits);CHKERRQ(ierr);
891   ierr = PetscFree(sends);CHKERRQ(ierr);
892   ierr = PetscFree(nprocs);CHKERRQ(ierr);
893 
894   /* pack messages to send back to local owners */
895   starts[0] = 0;
896   for (i=1; i<ng; i++) {
897     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
898     else starts[i] = starts[i-1];
899   }
900   nsends2 = nrecvs;
901   ierr    = PetscMalloc1((nsends2+1),&nprocs);CHKERRQ(ierr); /* length of each message */
902   for (i=0; i<nrecvs; i++) {
903     nprocs[i] = 1;
904     for (j=0; j<len[i]; j++) {
905       node = recvs[2*i*nmax+2*j]-rstart;
906       if (nownedsenders[node] > 1) nprocs[i] += 2 + nownedsenders[node];
907     }
908   }
909   nt = 0;
910   for (i=0; i<nsends2; i++) nt += nprocs[i];
911 
912   ierr = PetscMalloc1((nt+1),&sends2);CHKERRQ(ierr);
913   ierr = PetscMalloc1((nsends2+1),&starts2);CHKERRQ(ierr);
914 
915   starts2[0] = 0;
916   for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
917   /*
918      Each message is 1 + nprocs[i] long, and consists of
919        (0) the number of nodes being sent back
920        (1) the local node number,
921        (2) the number of processors sharing it,
922        (3) the processors sharing it
923   */
924   for (i=0; i<nsends2; i++) {
925     cnt = 1;
926     sends2[starts2[i]] = 0;
927     for (j=0; j<len[i]; j++) {
928       node = recvs[2*i*nmax+2*j]-rstart;
929       if (nownedsenders[node] > 1) {
930         sends2[starts2[i]]++;
931         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
932         sends2[starts2[i]+cnt++] = nownedsenders[node];
933         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
934         cnt += nownedsenders[node];
935       }
936     }
937   }
938 
939   /* receive the message lengths */
940   nrecvs2 = nsends;
941   ierr    = PetscMalloc1((nrecvs2+1),&lens2);CHKERRQ(ierr);
942   ierr    = PetscMalloc1((nrecvs2+1),&starts3);CHKERRQ(ierr);
943   ierr    = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
944   for (i=0; i<nrecvs2; i++) {
945     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
946   }
947 
948   /* send the message lengths */
949   for (i=0; i<nsends2; i++) {
950     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
951   }
952 
953   /* wait on receives of lens */
954   if (nrecvs2) {
955     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
956     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
957     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
958   }
959   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
960 
961   starts3[0] = 0;
962   nt         = 0;
963   for (i=0; i<nrecvs2-1; i++) {
964     starts3[i+1] = starts3[i] + lens2[i];
965     nt          += lens2[i];
966   }
967   if (nrecvs2) nt += lens2[nrecvs2-1];
968 
969   ierr = PetscMalloc1((nt+1),&recvs2);CHKERRQ(ierr);
970   ierr = PetscMalloc1((nrecvs2+1),&recv_waits);CHKERRQ(ierr);
971   for (i=0; i<nrecvs2; i++) {
972     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
973   }
974 
975   /* send the messages */
976   ierr = PetscMalloc1((nsends2+1),&send_waits);CHKERRQ(ierr);
977   for (i=0; i<nsends2; i++) {
978     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
979   }
980 
981   /* wait on receives */
982   if (nrecvs2) {
983     ierr = PetscMalloc1(nrecvs2,&recv_statuses);CHKERRQ(ierr);
984     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
985     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
986   }
987   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
988   ierr = PetscFree(nprocs);CHKERRQ(ierr);
989 
990   if (debug) { /* -----------------------------------  */
991     cnt = 0;
992     for (i=0; i<nrecvs2; i++) {
993       nt = recvs2[cnt++];
994       for (j=0; j<nt; j++) {
995         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %D number of subdomains %D: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
996         for (k=0; k<recvs2[cnt+1]; k++) {
997           ierr = PetscSynchronizedPrintf(comm,"%D ",recvs2[cnt+2+k]);CHKERRQ(ierr);
998         }
999         cnt += 2 + recvs2[cnt+1];
1000         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1001       }
1002     }
1003     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1004   } /* -----------------------------------  */
1005 
1006   /* count number subdomains for each local node */
1007   ierr = PetscMalloc1(size,&nprocs);CHKERRQ(ierr);
1008   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
1009   cnt  = 0;
1010   for (i=0; i<nrecvs2; i++) {
1011     nt = recvs2[cnt++];
1012     for (j=0; j<nt; j++) {
1013       for (k=0; k<recvs2[cnt+1]; k++) nprocs[recvs2[cnt+2+k]]++;
1014       cnt += 2 + recvs2[cnt+1];
1015     }
1016   }
1017   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
1018   *nproc    = nt;
1019   ierr = PetscMalloc1((nt+1),procs);CHKERRQ(ierr);
1020   ierr = PetscMalloc1((nt+1),numprocs);CHKERRQ(ierr);
1021   ierr = PetscMalloc1((nt+1),indices);CHKERRQ(ierr);
1022   for (i=0;i<nt+1;i++) (*indices)[i]=NULL;
1023   ierr = PetscMalloc1(size,&bprocs);CHKERRQ(ierr);
1024   cnt       = 0;
1025   for (i=0; i<size; i++) {
1026     if (nprocs[i] > 0) {
1027       bprocs[i]        = cnt;
1028       (*procs)[cnt]    = i;
1029       (*numprocs)[cnt] = nprocs[i];
1030       ierr             = PetscMalloc1(nprocs[i],&(*indices)[cnt]);CHKERRQ(ierr);
1031       cnt++;
1032     }
1033   }
1034 
1035   /* make the list of subdomains for each nontrivial local node */
1036   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
1037   cnt  = 0;
1038   for (i=0; i<nrecvs2; i++) {
1039     nt = recvs2[cnt++];
1040     for (j=0; j<nt; j++) {
1041       for (k=0; k<recvs2[cnt+1]; k++) (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
1042       cnt += 2 + recvs2[cnt+1];
1043     }
1044   }
1045   ierr = PetscFree(bprocs);CHKERRQ(ierr);
1046   ierr = PetscFree(recvs2);CHKERRQ(ierr);
1047 
1048   /* sort the node indexing by their global numbers */
1049   nt = *nproc;
1050   for (i=0; i<nt; i++) {
1051     ierr = PetscMalloc1(((*numprocs)[i]),&tmp);CHKERRQ(ierr);
1052     for (j=0; j<(*numprocs)[i]; j++) tmp[j] = lindices[(*indices)[i][j]];
1053     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
1054     ierr = PetscFree(tmp);CHKERRQ(ierr);
1055   }
1056 
1057   if (debug) { /* -----------------------------------  */
1058     nt = *nproc;
1059     for (i=0; i<nt; i++) {
1060       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %D number of indices %D: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
1061       for (j=0; j<(*numprocs)[i]; j++) {
1062         ierr = PetscSynchronizedPrintf(comm,"%D ",(*indices)[i][j]);CHKERRQ(ierr);
1063       }
1064       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
1065     }
1066     ierr = PetscSynchronizedFlush(comm,PETSC_STDOUT);CHKERRQ(ierr);
1067   } /* -----------------------------------  */
1068 
1069   /* wait on sends */
1070   if (nsends2) {
1071     ierr = PetscMalloc1(nsends2,&send_status);CHKERRQ(ierr);
1072     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
1073     ierr = PetscFree(send_status);CHKERRQ(ierr);
1074   }
1075 
1076   ierr = PetscFree(starts3);CHKERRQ(ierr);
1077   ierr = PetscFree(dest);CHKERRQ(ierr);
1078   ierr = PetscFree(send_waits);CHKERRQ(ierr);
1079 
1080   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
1081   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
1082   ierr = PetscFree(starts);CHKERRQ(ierr);
1083   ierr = PetscFree(starts2);CHKERRQ(ierr);
1084   ierr = PetscFree(lens2);CHKERRQ(ierr);
1085 
1086   ierr = PetscFree(source);CHKERRQ(ierr);
1087   ierr = PetscFree(len);CHKERRQ(ierr);
1088   ierr = PetscFree(recvs);CHKERRQ(ierr);
1089   ierr = PetscFree(nprocs);CHKERRQ(ierr);
1090   ierr = PetscFree(sends2);CHKERRQ(ierr);
1091 
1092   /* put the information about myself as the first entry in the list */
1093   first_procs    = (*procs)[0];
1094   first_numprocs = (*numprocs)[0];
1095   first_indices  = (*indices)[0];
1096   for (i=0; i<*nproc; i++) {
1097     if ((*procs)[i] == rank) {
1098       (*procs)[0]    = (*procs)[i];
1099       (*numprocs)[0] = (*numprocs)[i];
1100       (*indices)[0]  = (*indices)[i];
1101       (*procs)[i]    = first_procs;
1102       (*numprocs)[i] = first_numprocs;
1103       (*indices)[i]  = first_indices;
1104       break;
1105     }
1106   }
1107   PetscFunctionReturn(0);
1108 }
1109 
1110 #undef __FUNCT__
1111 #define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockInfo"
1112 /*@C
1113     ISLocalToGlobalMappingRestoreBlockInfo - Frees the memory allocated by ISLocalToGlobalMappingGetBlockInfo()
1114 
1115     Collective on ISLocalToGlobalMapping
1116 
1117     Input Parameters:
1118 .   mapping - the mapping from local to global indexing
1119 
1120     Output Parameter:
1121 +   nproc - number of processors that are connected to this one
1122 .   proc - neighboring processors
1123 .   numproc - number of indices for each processor
1124 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
1125 
1126     Level: advanced
1127 
1128 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1129           ISLocalToGlobalMappingGetInfo()
1130 @*/
1131 PetscErrorCode  ISLocalToGlobalMappingRestoreBlockInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1132 {
1133   PetscErrorCode ierr;
1134   PetscInt       i;
1135 
1136   PetscFunctionBegin;
1137   ierr = PetscFree(*procs);CHKERRQ(ierr);
1138   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1139   if (*indices) {
1140     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1141     for (i=1; i<*nproc; i++) {
1142       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1143     }
1144     ierr = PetscFree(*indices);CHKERRQ(ierr);
1145   }
1146   PetscFunctionReturn(0);
1147 }
1148 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
     each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each subdomain (processor)
-   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)

    Level: advanced

    Concepts: mapping^local to global

    Fortran Usage:
$        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
$        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
          PetscInt indices[nproc][numprocmax],ierr)
        There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
        indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.


.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;
  PetscInt       **bindices = NULL,bs = mapping->bs,i,j,k;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* obtain the block-level neighbor info, then expand each block index into bs point indices */
  ierr = ISLocalToGlobalMappingGetBlockInfo(mapping,nproc,procs,numprocs,&bindices);CHKERRQ(ierr);
  ierr = PetscCalloc1(*nproc,&*indices);CHKERRQ(ierr);
  for (i=0; i<*nproc; i++) {
    /* each block index b becomes the bs consecutive point indices bs*b, bs*b+1, ..., bs*b+bs-1 */
    ierr = PetscMalloc1(bs*(*numprocs)[i],&(*indices)[i]);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) {
      for (k=0; k<bs; k++) {
        (*indices)[i][j*bs+k] = bs*bindices[i][j] + k;
      }
    }
    /* counts are per-point after expansion */
    (*numprocs)[i] *= bs;
  }
  /* free the block-level index lists; the procs/numprocs arrays are reused (now point-scaled) */
  if (bindices) {
    ierr = PetscFree(bindices[0]);CHKERRQ(ierr);
    for (i=1; i<*nproc; i++) {
      ierr = PetscFree(bindices[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(bindices);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1208 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameter:
+   nproc - number of processors that are connected to this one
.   proc - neighboring processors
.   numproc - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* the point-level arrays built by ISLocalToGlobalMappingGetInfo() have the same
     allocation layout as the block-level ones, so the block restore routine frees them */
  ierr = ISLocalToGlobalMappingRestoreBlockInfo(mapping,nproc,procs,numprocs,indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1238 
1239 #undef __FUNCT__
1240 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1241 /*@C
1242    ISLocalToGlobalMappingGetIndices - Get global indices for every local point that is mapped
1243 
1244    Not Collective
1245 
1246    Input Arguments:
1247 . ltog - local to global mapping
1248 
1249    Output Arguments:
1250 . array - array of indices, the length of this array may be obtained with ISLocalToGlobalMappingGetSize()
1251 
1252    Level: advanced
1253 
1254    Notes: ISLocalToGlobalMappingGetSize() returns the length the this array
1255 
1256 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices(), ISLocalToGlobalMappingGetBlockIndices(), ISLocalToGlobalMappingRestoreBlockIndices()
1257 @*/
1258 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1259 {
1260   PetscFunctionBegin;
1261   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1262   PetscValidPointer(array,2);
1263   if (ltog->bs == 1) {
1264     *array = ltog->indices;
1265   } else {
1266     PetscInt       *jj,k,i,j,n = ltog->n, bs = ltog->bs;
1267     const PetscInt *ii;
1268     PetscErrorCode ierr;
1269 
1270     ierr = PetscMalloc1(bs*n,&jj);CHKERRQ(ierr);
1271     *array = jj;
1272     k    = 0;
1273     ii   = ltog->indices;
1274     for (i=0; i<n; i++)
1275       for (j=0; j<bs; j++)
1276         jj[k++] = bs*ii[i] + j;
1277   }
1278   PetscFunctionReturn(0);
1279 }
1280 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
/*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()

   Not Collective

   Input Arguments:
+ ltog - local to global mapping
- array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* with block size one the caller was handed the internal array; it must return that exact pointer */
  if (ltog->bs == 1 && *array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");

  /* with block size > 1 ISLocalToGlobalMappingGetIndices() allocated a scratch copy; free it here */
  if (ltog->bs > 1) {
    PetscErrorCode ierr;
    ierr = PetscFree(*(void**)array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1309 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetBlockIndices"
/*@C
   ISLocalToGlobalMappingGetBlockIndices - Get global indices for every local block

   Not Collective

   Input Arguments:
. ltog - local to global mapping

   Output Arguments:
. array - array of indices; this is a pointer to the mapping's internal storage, so the
          caller must not free it and must return it with ISLocalToGlobalMappingRestoreBlockIndices()

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreBlockIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* hand back the internal block-index array directly; no copy is made */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1335 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingRestoreBlockIndices"
/*@C
   ISLocalToGlobalMappingRestoreBlockIndices - Restore indices obtained with ISLocalToGlobalMappingGetBlockIndices()

   Not Collective

   Input Arguments:
+ ltog - local to global mapping
- array - array of indices

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetBlockIndices()
@*/
PetscErrorCode  ISLocalToGlobalMappingRestoreBlockIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the Get routine returned internal storage, so the same pointer must come back;
     nothing is freed here, the caller's pointer is simply invalidated */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  *array = NULL;
  PetscFunctionReturn(0);
}
1360 
1361 #undef __FUNCT__
1362 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1363 /*@C
1364    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1365 
1366    Not Collective
1367 
1368    Input Arguments:
1369 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1370 . n - number of mappings to concatenate
1371 - ltogs - local to global mappings
1372 
1373    Output Arguments:
1374 . ltogcat - new mapping
1375 
1376    Note: this currently always returns a mapping with block size of 1
1377 
1378    Developer Note: If all the input mapping have the same block size we could easily handle that as a special case
1379 
1380    Level: advanced
1381 
1382 .seealso: ISLocalToGlobalMappingCreate()
1383 @*/
1384 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1385 {
1386   PetscInt       i,cnt,m,*idx;
1387   PetscErrorCode ierr;
1388 
1389   PetscFunctionBegin;
1390   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1391   if (n > 0) PetscValidPointer(ltogs,3);
1392   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1393   PetscValidPointer(ltogcat,4);
1394   for (cnt=0,i=0; i<n; i++) {
1395     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1396     cnt += m;
1397   }
1398   ierr = PetscMalloc1(cnt,&idx);CHKERRQ(ierr);
1399   for (cnt=0,i=0; i<n; i++) {
1400     const PetscInt *subidx;
1401     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1402     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1403     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1404     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1405     cnt += m;
1406   }
1407   ierr = ISLocalToGlobalMappingCreate(comm,1,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1408   PetscFunctionReturn(0);
1409 }
1410 
1411 
1412