xref: /petsc/src/vec/is/utils/isltog.c (revision c8cd8b03a8e69285ecfea39db41c0ff684948fa5) !
1 #define PETSCVEC_DLL
2 
3 #include "petscvec.h"   /*I "petscsys.h" I*/
4 #include "include/private/isimpl.h"    /*I "petscis.h"  I*/
5 
6 PetscCookie PETSCVEC_DLLEXPORT IS_LTOGM_COOKIE = 0;
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
10 /*@C
11     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
12 
13     Not Collective
14 
15     Input Parameter:
16 .   ltog - local to global mapping
17 
18     Output Parameter:
19 .   n - the number of entries in the local mapping
20 
21     Level: advanced
22 
23     Concepts: mapping^local to global
24 
25 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
26 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  PetscValidIntPointer(n,2);
  /* the local size is simply the entry count recorded at creation time */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "ISLocalToGlobalMappingView"
38 /*@C
39     ISLocalToGlobalMappingView - View a local to global mapping
40 
41     Not Collective
42 
43     Input Parameters:
44 +   ltog - local to global mapping
45 -   viewer - viewer to display the mapping with; if PETSC_NULL the default ASCII viewer on the mapping's communicator is used
46 
47     Level: advanced
48 
49     Concepts: mapping^local to global
50 
51 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
52 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt        i;
  PetscMPIInt     rank;
  PetscTruth      iascii;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  /* a null viewer means "use the default ASCII viewer on the mapping's communicator" */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_COOKIE,2);

  ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* each rank prints one "[rank] local-index global-index" line per entry */
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    /* flush so the synchronized output from all ranks is actually emitted in rank order */
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    /* only ASCII viewers are supported for this object */
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  }

  PetscFunctionReturn(0);
}
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
83 /*@
84     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
85     ordering and a global parallel ordering.
86 
87     Not collective
88 
89     Input Parameter:
90 .   is - index set containing the global numbers for each local
91 
92     Output Parameter:
93 .   mapping - new mapping data structure
94 
95     Level: advanced
96 
97     Concepts: mapping^local to global
98 
99 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
100 @*/
101 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
102 {
103   PetscErrorCode ierr;
104   PetscInt       n,*indices;
105   MPI_Comm       comm;
106 
107   PetscFunctionBegin;
108   PetscValidHeaderSpecific(is,IS_COOKIE,1);
109   PetscValidPointer(mapping,2);
110 
111   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
112   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
113   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
114   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
115   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
116 
117   PetscFunctionReturn(0);
118 }
119 
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
123 /*@
124     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
125     ordering and a global parallel ordering.
126 
127     Not Collective, but communicator may have more than one process
128 
129     Input Parameters:
130 +   comm - MPI communicator
131 .   n - the number of local elements
132 -   indices - the global index for each local element
133 
134     Output Parameter:
135 .   mapping - new mapping data structure
136 
137     Level: advanced
138 
139     Concepts: mapping^local to global
140 
141 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreateNC()
142 @*/
143 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],ISLocalToGlobalMapping *mapping)
144 {
145   PetscErrorCode ierr;
146   PetscInt       *in;
147 
148   PetscFunctionBegin;
149   if (n) PetscValidIntPointer(indices,3);
150   PetscValidPointer(mapping,4);
151   ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
152   ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
153   ierr = ISLocalToGlobalMappingCreateNC(cm,n,in,mapping);CHKERRQ(ierr);
154   PetscFunctionReturn(0);
155 }
156 
157 #undef __FUNCT__
158 #define __FUNCT__ "ISLocalToGlobalMappingCreateNC"
159 /*@C
160     ISLocalToGlobalMappingCreateNC - Creates a mapping between a local (0 to n)
161     ordering and a global parallel ordering.
162 
163     Not Collective, but communicator may have more than one process
164 
165     Input Parameters:
166 +   comm - MPI communicator
167 .   n - the number of local elements
168 -   indices - the global index for each local element
169 
170     Output Parameter:
171 .   mapping - new mapping data structure
172 
173     Level: developer
174 
175     Notes: Does not copy the indices, just keeps the pointer to the indices. The ISLocalToGlobalMappingDestroy()
176     will free the space so it must be obtained with PetscMalloc() and it must not be freed elsewhere.
177 
178     Concepts: mapping^local to global
179 
180 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate()
181 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateNC(MPI_Comm cm,PetscInt n,const PetscInt indices[],ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);
  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* ensure the vec package (which registers IS_LTOGM_COOKIE) is initialized */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);

  /* the indices are NOT copied ("NC"); the mapping takes ownership and
     ISLocalToGlobalMappingDestroy() will PetscFree() them */
  (*mapping)->n       = n;
  (*mapping)->indices = (PetscInt*)indices;
  ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);

  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  PetscFunctionReturn(0);
}
208 
209 #undef __FUNCT__
210 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
211 /*@
212     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
213        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
214        and VecSetLocalToGlobalMappingBlock().
215 
216     Not Collective, but communicator may have more than one process
217 
218     Input Parameters:
219 +    inmap - original point-wise mapping
220 -    bs - block size
221 
222     Output Parameter:
223 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
224 
225     Level: advanced
226 
227     Concepts: mapping^local to global
228 
229 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
230 @*/
231 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
232 {
233   PetscErrorCode ierr;
234   PetscInt       *ii,i,n;
235 
236   PetscFunctionBegin;
237   PetscValidHeaderSpecific(inmap,IS_LTOGM_COOKIE,1);
238   PetscValidPointer(outmap,1);
239   if (bs > 1) {
240     n    = inmap->n/bs;
241     if (n*bs != inmap->n) SETERRQ(PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
242     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
243     for (i=0; i<n; i++) {
244       ii[i] = inmap->indices[bs*i]/bs;
245     }
246     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,outmap);CHKERRQ(ierr);
247     ierr = PetscFree(ii);CHKERRQ(ierr);
248   } else {
249     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
250     *outmap = inmap;
251   }
252   PetscFunctionReturn(0);
253 }
254 
255 #undef __FUNCT__
256 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
257 /*@
258    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
259    ordering and a global parallel ordering.
260 
261    Not Collective
262 
263    Input Parameters:
264 .  mapping - mapping data structure
265 
266    Level: advanced
267 
268 .seealso: ISLocalToGlobalMappingCreate()
269 @*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  /* only free the object when the last reference is released */
  if (--((PetscObject)mapping)->refct > 0) PetscFunctionReturn(0);
  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  /* globals is allocated lazily by ISGlobalToLocalMappingSetUp_Private(), so it may still be 0 here */
  ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
281 
282 #undef __FUNCT__
283 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
284 /*@
285     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
286     a new index set using the global numbering defined in an ISLocalToGlobalMapping
287     context.
288 
289     Not collective
290 
291     Input Parameters:
292 +   mapping - mapping between local and global numbering
293 -   is - index set in local numbering
294 
295     Output Parameters:
296 .   newis - index set in global numbering
297 
298     Level: advanced
299 
300     Concepts: mapping^local to global
301 
302 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
303           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
304 @*/
305 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
306 {
307   PetscErrorCode ierr;
308   PetscInt       n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;
309 
310   PetscFunctionBegin;
311   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
312   PetscValidHeaderSpecific(is,IS_COOKIE,2);
313   PetscValidPointer(newis,3);
314 
315   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
316   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
317   idxmap = mapping->indices;
318 
319   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
320   for (i=0; i<n; i++) {
321     if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
322     idxout[i] = idxmap[idxin[i]];
323   }
324   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
325   ierr = ISCreateGeneralNC(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
326   PetscFunctionReturn(0);
327 }
328 
329 /*MC
330    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
331    and converts them to the global numbering.
332 
333    Not collective
334 
335    Input Parameters:
336 +  mapping - the local to global mapping context
337 .  N - number of integers
338 -  in - input indices in local numbering
339 
340    Output Parameter:
341 .  out - indices in global numbering
342 
343    Synopsis:
344    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
345 
346    Notes:
347    The in and out array parameters may be identical.
348 
349    Level: advanced
350 
351 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
352           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
353           AOPetscToApplication(), ISGlobalToLocalMappingApply()
354 
355     Concepts: mapping^local to global
356 
357 M*/
358 
359 /* -----------------------------------------------------------------------------------------*/
360 
361 #undef __FUNCT__
362 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
363 /*
364     Creates the global fields in the ISLocalToGlobalMapping structure
365 */
366 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
367 {
368   PetscErrorCode ierr;
369   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
370 
371   PetscFunctionBegin;
372   end   = 0;
373   start = 100000000;
374 
375   for (i=0; i<n; i++) {
376     if (idx[i] < 0) continue;
377     if (idx[i] < start) start = idx[i];
378     if (idx[i] > end)   end   = idx[i];
379   }
380   if (start > end) {start = 0; end = -1;}
381   mapping->globalstart = start;
382   mapping->globalend   = end;
383 
384   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
385   mapping->globals = globals;
386   for (i=0; i<end-start+1; i++) {
387     globals[i] = -1;
388   }
389   for (i=0; i<n; i++) {
390     if (idx[i] < 0) continue;
391     globals[idx[i] - start] = i;
392   }
393 
394   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
395   PetscFunctionReturn(0);
396 }
397 
398 #undef __FUNCT__
399 #define __FUNCT__ "ISGlobalToLocalMappingApply"
400 /*@
401     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
402     specified with a global numbering.
403 
404     Not collective
405 
406     Input Parameters:
407 +   mapping - mapping between local and global numbering
408 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
409            IS_GTOLM_DROP - drops the indices with no local value from the output list
410 .   n - number of global indices to map
411 -   idx - global indices to map
412 
413     Output Parameters:
414 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
415 -   idxout - local index of each global index, one must pass in an array long enough
416              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
417              idxout == PETSC_NULL to determine the required length (returned in nout)
418              and then allocate the required space and call ISGlobalToLocalMappingApply()
419              a second time to set the values.
420 
421     Notes:
422     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
423 
424     This is not scalable in memory usage. Each processor requires O(Nglobal) size
425     array to compute these.
426 
427     Level: advanced
428 
429     Concepts: mapping^global to local
430 
431 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
432           ISLocalToGlobalMappingDestroy()
433 @*/
434 PetscErrorCode PETSCVEC_DLLEXPORT ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
435                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
436 {
437   PetscInt       i,*globals,nf = 0,tmp,start,end;
438   PetscErrorCode ierr;
439 
440   PetscFunctionBegin;
441   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
442   if (!mapping->globals) {
443     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
444   }
445   globals = mapping->globals;
446   start   = mapping->globalstart;
447   end     = mapping->globalend;
448 
449   if (type == IS_GTOLM_MASK) {
450     if (idxout) {
451       for (i=0; i<n; i++) {
452         if (idx[i] < 0) idxout[i] = idx[i];
453         else if (idx[i] < start) idxout[i] = -1;
454         else if (idx[i] > end)   idxout[i] = -1;
455         else                     idxout[i] = globals[idx[i] - start];
456       }
457     }
458     if (nout) *nout = n;
459   } else {
460     if (idxout) {
461       for (i=0; i<n; i++) {
462         if (idx[i] < 0) continue;
463         if (idx[i] < start) continue;
464         if (idx[i] > end) continue;
465         tmp = globals[idx[i] - start];
466         if (tmp < 0) continue;
467         idxout[nf++] = tmp;
468       }
469     } else {
470       for (i=0; i<n; i++) {
471         if (idx[i] < 0) continue;
472         if (idx[i] < start) continue;
473         if (idx[i] > end) continue;
474         tmp = globals[idx[i] - start];
475         if (tmp < 0) continue;
476         nf++;
477       }
478     }
479     if (nout) *nout = nf;
480   }
481 
482   PetscFunctionReturn(0);
483 }
484 
485 #undef __FUNCT__
486 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
487 /*@C
488     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
489      each index shared by more than one processor
490 
491     Collective on ISLocalToGlobalMapping
492 
493     Input Parameters:
494 .   mapping - the mapping from local to global indexing
495 
496     Output Parameter:
497 +   nproc - number of processors that are connected to this one
498 .   proc - neighboring processors
499 .   numproc - number of indices for each subdomain (processor)
500 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
501 
502     Level: advanced
503 
504     Concepts: mapping^local to global
505 
506     Fortran Usage:
507 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
508 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
509           PetscInt indices[nproc][numprocmax],ierr)
510         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
511         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
512 
513 
514 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
515           ISLocalToGlobalMappingRestoreInfo()
516 @*/
517 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
518 {
519   PetscErrorCode ierr;
520   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
521   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
522   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
523   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
524   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
525   PetscInt       first_procs,first_numprocs,*first_indices;
526   MPI_Request    *recv_waits,*send_waits;
527   MPI_Status     recv_status,*send_status,*recv_statuses;
528   MPI_Comm       comm = ((PetscObject)mapping)->comm;
529   PetscTruth     debug = PETSC_FALSE;
530 
531   PetscFunctionBegin;
532   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
533   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
534   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
535   if (size == 1) {
536     *nproc         = 0;
537     *procs         = PETSC_NULL;
538     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
539     (*numprocs)[0] = 0;
540     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
541     (*indices)[0]  = PETSC_NULL;
542     PetscFunctionReturn(0);
543   }
544 
545   ierr = PetscOptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);
546 
547   /*
548     Notes on ISLocalToGlobalMappingGetInfo
549 
550     globally owned node - the nodes that have been assigned to this processor in global
551            numbering, just for this routine.
552 
553     nontrivial globally owned node - node assigned to this processor that is on a subdomain
554            boundary (i.e. is has more than one local owner)
555 
556     locally owned node - node that exists on this processors subdomain
557 
558     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
559            local subdomain
560   */
561   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
562   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
563   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
564 
565   for (i=0; i<n; i++) {
566     if (lindices[i] > max) max = lindices[i];
567   }
568   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
569   Ng++;
570   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
571   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
572   scale  = Ng/size + 1;
573   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
574   rstart = scale*rank;
575 
576   /* determine ownership ranges of global indices */
577   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
578   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
579 
580   /* determine owners of each local node  */
581   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
582   for (i=0; i<n; i++) {
583     proc             = lindices[i]/scale; /* processor that globally owns this index */
584     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
585     owner[i]         = proc;
586     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
587   }
588   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
589   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
590 
591   /* inform other processors of number of messages and max length*/
592   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
593   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
594 
595   /* post receives for owned rows */
596   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
597   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
598   for (i=0; i<nrecvs; i++) {
599     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
600   }
601 
602   /* pack messages containing lists of local nodes to owners */
603   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
604   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
605   starts[0]  = 0;
606   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
607   for (i=0; i<n; i++) {
608     sends[starts[owner[i]]++] = lindices[i];
609     sends[starts[owner[i]]++] = i;
610   }
611   ierr = PetscFree(owner);CHKERRQ(ierr);
612   starts[0]  = 0;
613   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
614 
615   /* send the messages */
616   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
617   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
618   cnt = 0;
619   for (i=0; i<size; i++) {
620     if (nprocs[2*i]) {
621       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
622       dest[cnt] = i;
623       cnt++;
624     }
625   }
626   ierr = PetscFree(starts);CHKERRQ(ierr);
627 
628   /* wait on receives */
629   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
630   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
631   cnt  = nrecvs;
632   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
633   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
634   while (cnt) {
635     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
636     /* unpack receives into our local space */
637     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
638     source[imdex]  = recv_status.MPI_SOURCE;
639     len[imdex]     = len[imdex]/2;
640     /* count how many local owners for each of my global owned indices */
641     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
642     cnt--;
643   }
644   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
645 
646   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
647   nowned  = 0;
648   nownedm = 0;
649   for (i=0; i<ng; i++) {
650     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
651   }
652 
653   /* create single array to contain rank of all local owners of each globally owned index */
654   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
655   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
656   starts[0] = 0;
657   for (i=1; i<ng; i++) {
658     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
659     else starts[i] = starts[i-1];
660   }
661 
662   /* for each nontrival globally owned node list all arriving processors */
663   for (i=0; i<nrecvs; i++) {
664     for (j=0; j<len[i]; j++) {
665       node = recvs[2*i*nmax+2*j]-rstart;
666       if (nownedsenders[node] > 1) {
667         ownedsenders[starts[node]++] = source[i];
668       }
669     }
670   }
671 
672   if (debug) { /* -----------------------------------  */
673     starts[0]    = 0;
674     for (i=1; i<ng; i++) {
675       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
676       else starts[i] = starts[i-1];
677     }
678     for (i=0; i<ng; i++) {
679       if (nownedsenders[i] > 1) {
680         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
681         for (j=0; j<nownedsenders[i]; j++) {
682           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
683         }
684         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
685       }
686     }
687     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
688   }/* -----------------------------------  */
689 
690   /* wait on original sends */
691   if (nsends) {
692     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
693     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
694     ierr = PetscFree(send_status);CHKERRQ(ierr);
695   }
696   ierr = PetscFree(send_waits);CHKERRQ(ierr);
697   ierr = PetscFree(sends);CHKERRQ(ierr);
698   ierr = PetscFree(nprocs);CHKERRQ(ierr);
699 
700   /* pack messages to send back to local owners */
701   starts[0]    = 0;
702   for (i=1; i<ng; i++) {
703     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
704     else starts[i] = starts[i-1];
705   }
706   nsends2 = nrecvs;
707   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
708   for (i=0; i<nrecvs; i++) {
709     nprocs[i] = 1;
710     for (j=0; j<len[i]; j++) {
711       node = recvs[2*i*nmax+2*j]-rstart;
712       if (nownedsenders[node] > 1) {
713         nprocs[i] += 2 + nownedsenders[node];
714       }
715     }
716   }
717   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
718   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
719   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
720   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
721   /*
722      Each message is 1 + nprocs[i] long, and consists of
723        (0) the number of nodes being sent back
724        (1) the local node number,
725        (2) the number of processors sharing it,
726        (3) the processors sharing it
727   */
728   for (i=0; i<nsends2; i++) {
729     cnt = 1;
730     sends2[starts2[i]] = 0;
731     for (j=0; j<len[i]; j++) {
732       node = recvs[2*i*nmax+2*j]-rstart;
733       if (nownedsenders[node] > 1) {
734         sends2[starts2[i]]++;
735         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
736         sends2[starts2[i]+cnt++] = nownedsenders[node];
737         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
738         cnt += nownedsenders[node];
739       }
740     }
741   }
742 
743   /* receive the message lengths */
744   nrecvs2 = nsends;
745   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
746   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
747   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
748   for (i=0; i<nrecvs2; i++) {
749     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
750   }
751 
752   /* send the message lengths */
753   for (i=0; i<nsends2; i++) {
754     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
755   }
756 
757   /* wait on receives of lens */
758   if (nrecvs2) {
759     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
760     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
761     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
762   }
763   ierr = PetscFree(recv_waits);
764 
765   starts3[0] = 0;
766   nt         = 0;
767   for (i=0; i<nrecvs2-1; i++) {
768     starts3[i+1] = starts3[i] + lens2[i];
769     nt          += lens2[i];
770   }
771   nt += lens2[nrecvs2-1];
772 
773   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
774   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
775   for (i=0; i<nrecvs2; i++) {
776     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
777   }
778 
779   /* send the messages */
780   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
781   for (i=0; i<nsends2; i++) {
782     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
783   }
784 
785   /* wait on receives */
786   if (nrecvs2) {
787     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
788     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
789     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
790   }
791   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
792   ierr = PetscFree(nprocs);CHKERRQ(ierr);
793 
794   if (debug) { /* -----------------------------------  */
795     cnt = 0;
796     for (i=0; i<nrecvs2; i++) {
797       nt = recvs2[cnt++];
798       for (j=0; j<nt; j++) {
799         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
800         for (k=0; k<recvs2[cnt+1]; k++) {
801           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
802         }
803         cnt += 2 + recvs2[cnt+1];
804         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
805       }
806     }
807     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
808   } /* -----------------------------------  */
809 
810   /* count number subdomains for each local node */
811   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
812   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
813   cnt  = 0;
814   for (i=0; i<nrecvs2; i++) {
815     nt = recvs2[cnt++];
816     for (j=0; j<nt; j++) {
817       for (k=0; k<recvs2[cnt+1]; k++) {
818         nprocs[recvs2[cnt+2+k]]++;
819       }
820       cnt += 2 + recvs2[cnt+1];
821     }
822   }
823   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
824   *nproc    = nt;
825   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
826   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
827   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
828   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
829   cnt       = 0;
830   for (i=0; i<size; i++) {
831     if (nprocs[i] > 0) {
832       bprocs[i]        = cnt;
833       (*procs)[cnt]    = i;
834       (*numprocs)[cnt] = nprocs[i];
835       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
836       cnt++;
837     }
838   }
839 
840   /* make the list of subdomains for each nontrivial local node */
841   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
842   cnt  = 0;
843   for (i=0; i<nrecvs2; i++) {
844     nt = recvs2[cnt++];
845     for (j=0; j<nt; j++) {
846       for (k=0; k<recvs2[cnt+1]; k++) {
847         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
848       }
849       cnt += 2 + recvs2[cnt+1];
850     }
851   }
852   ierr = PetscFree(bprocs);CHKERRQ(ierr);
853   ierr = PetscFree(recvs2);CHKERRQ(ierr);
854 
855   /* sort the node indexing by their global numbers */
856   nt = *nproc;
857   for (i=0; i<nt; i++) {
858     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
859     for (j=0; j<(*numprocs)[i]; j++) {
860       tmp[j] = lindices[(*indices)[i][j]];
861     }
862     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
863     ierr = PetscFree(tmp);CHKERRQ(ierr);
864   }
865 
866   if (debug) { /* -----------------------------------  */
867     nt = *nproc;
868     for (i=0; i<nt; i++) {
869       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
870       for (j=0; j<(*numprocs)[i]; j++) {
871         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
872       }
873       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
874     }
875     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
876   } /* -----------------------------------  */
877 
878   /* wait on sends */
879   if (nsends2) {
880     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
881     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
882     ierr = PetscFree(send_status);CHKERRQ(ierr);
883   }
884 
885   ierr = PetscFree(starts3);CHKERRQ(ierr);
886   ierr = PetscFree(dest);CHKERRQ(ierr);
887   ierr = PetscFree(send_waits);CHKERRQ(ierr);
888 
889   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
890   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
891   ierr = PetscFree(starts);CHKERRQ(ierr);
892   ierr = PetscFree(starts2);CHKERRQ(ierr);
893   ierr = PetscFree(lens2);CHKERRQ(ierr);
894 
895   ierr = PetscFree(source);CHKERRQ(ierr);
896   ierr = PetscFree(len);CHKERRQ(ierr);
897   ierr = PetscFree(recvs);CHKERRQ(ierr);
898   ierr = PetscFree(nprocs);CHKERRQ(ierr);
899   ierr = PetscFree(sends2);CHKERRQ(ierr);
900 
901   /* put the information about myself as the first entry in the list */
902   first_procs    = (*procs)[0];
903   first_numprocs = (*numprocs)[0];
904   first_indices  = (*indices)[0];
905   for (i=0; i<*nproc; i++) {
906     if ((*procs)[i] == rank) {
907       (*procs)[0]    = (*procs)[i];
908       (*numprocs)[0] = (*numprocs)[i];
909       (*indices)[0]  = (*indices)[i];
910       (*procs)[i]    = first_procs;
911       (*numprocs)[i] = first_numprocs;
912       (*indices)[i]  = first_indices;
913       break;
914     }
915   }
916   PetscFunctionReturn(0);
917 }
918 
919 #undef __FUNCT__
920 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
921 /*@C
922     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
923 
924     Collective on ISLocalToGlobalMapping
925 
    Input Parameters:
+   mapping - the mapping from local to global indexing
.   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Note: all of nproc, procs, numprocs, and indices must have been obtained from
    a previous call to ISLocalToGlobalMappingGetInfo()
934 
935     Level: advanced
936 
937 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
938           ISLocalToGlobalMappingGetInfo()
939 @*/
940 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
941 {
942   PetscErrorCode ierr;
943   PetscInt       i;
944 
945   PetscFunctionBegin;
946   ierr = PetscFree(*procs);CHKERRQ(ierr);
947   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
948   if (*indices) {
949     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
950     for (i=1; i<*nproc; i++) {
951       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
952     }
953     ierr = PetscFree(*indices);CHKERRQ(ierr);
954   }
955   PetscFunctionReturn(0);
956 }
957