xref: /petsc/src/vec/is/utils/isltog.c (revision a58c3bc3391eee32bc3fd94ac7edeea38fe57aae)
1 #define PETSCVEC_DLL
2 
3 #include "petscvec.h"   /*I "petscsys.h" I*/
4 #include "include/private/isimpl.h"    /*I "petscis.h"  I*/
5 
/* Cookie (class id) for ISLocalToGlobalMapping objects; stays -1 until it is
   lazily registered inside ISLocalToGlobalMappingCreateNC(). */
PetscCookie PETSCVEC_DLLEXPORT IS_LTOGM_COOKIE = -1;
7 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;  /* number of local indices stored in the mapping */
  PetscFunctionReturn(0);
}
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "ISLocalToGlobalMappingView"
38 /*@C
39     ISLocalToGlobalMappingView - View a local to global mapping
40 
41     Not Collective
42 
43     Input Parameters:
44 +   ltog - local to global mapping
45 -   viewer - viewer
46 
47     Level: advanced
48 
49     Concepts: mapping^local to global
50 
51 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
52 @*/
53 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
54 {
55   PetscInt        i;
56   PetscMPIInt     rank;
57   PetscTruth      iascii;
58   PetscErrorCode  ierr;
59 
60   PetscFunctionBegin;
61   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
62   if (!viewer) {
63     ierr = PetscViewerASCIIGetStdout(mapping->comm,&viewer);CHKERRQ(ierr);
64   }
65   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_COOKIE,2);
66 
67   ierr = MPI_Comm_rank(mapping->comm,&rank);CHKERRQ(ierr);
68   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
69   if (iascii) {
70     for (i=0; i<mapping->n; i++) {
71       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
72     }
73     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
74   } else {
75     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
76   }
77 
78   PetscFunctionReturn(0);
79 }
80 
81 #undef __FUNCT__
82 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
83 /*@
84     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
85     ordering and a global parallel ordering.
86 
87     Not collective
88 
89     Input Parameter:
90 .   is - index set containing the global numbers for each local
91 
92     Output Parameter:
93 .   mapping - new mapping data structure
94 
95     Level: advanced
96 
97     Concepts: mapping^local to global
98 
99 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
100 @*/
101 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
102 {
103   PetscErrorCode ierr;
104   PetscInt      n,*indices;
105   MPI_Comm comm;
106 
107   PetscFunctionBegin;
108   PetscValidHeaderSpecific(is,IS_COOKIE,1);
109   PetscValidPointer(mapping,2);
110 
111   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
112   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
113   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
114   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
115   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
116 
117   PetscFunctionReturn(0);
118 }
119 
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
123 /*@
124     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
125     ordering and a global parallel ordering.
126 
127     Not Collective, but communicator may have more than one process
128 
129     Input Parameters:
130 +   comm - MPI communicator
131 .   n - the number of local elements
132 -   indices - the global index for each local element
133 
134     Output Parameter:
135 .   mapping - new mapping data structure
136 
137     Level: advanced
138 
139     Concepts: mapping^local to global
140 
141 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreateNC()
142 @*/
143 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],ISLocalToGlobalMapping *mapping)
144 {
145   PetscErrorCode ierr;
146   PetscInt       *in;
147 
148   PetscFunctionBegin;
149   if (n) PetscValidIntPointer(indices,3);
150   PetscValidPointer(mapping,4);
151   ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
152   ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
153   ierr = ISLocalToGlobalMappingCreateNC(cm,n,in,mapping);CHKERRQ(ierr);
154   PetscFunctionReturn(0);
155 }
156 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateNC"
/*@C
    ISLocalToGlobalMappingCreateNC - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
-   indices - the global index for each local element

    Output Parameter:
.   mapping - new mapping data structure

    Level: developer

    Notes: Does not copy the indices, just keeps the pointer to the indices. The ISLocalToGlobalMappingDestroy()
    will free the space so it must be obtained with PetscMalloc() and it must not be freed elsewhere.

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateNC(MPI_Comm cm,PetscInt n,const PetscInt indices[],ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* indices may legitimately be NULL when there are no local entries */
  if (n) {
    PetscValidIntPointer(indices,3);
  }
  PetscValidPointer(mapping,4);
  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif
  /* lazily register the class cookie the first time a mapping is created */
  if (IS_LTOGM_COOKIE == -1) {
    ierr = PetscLogClassRegister(&IS_LTOGM_COOKIE,"IS L to G Mapping");CHKERRQ(ierr);
  }

  /* allocate the PETSc object header and hook up destroy/view functions */
  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
                    cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(*mapping,sizeof(struct _p_ISLocalToGlobalMapping)+n*sizeof(PetscInt));CHKERRQ(ierr);

  /* take ownership of the caller's array; Destroy() will PetscFree() it */
  (*mapping)->n       = n;
  (*mapping)->indices = (PetscInt*)indices;

  /*
      Do not create the global to local mapping. This is only created if
     ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  PetscFunctionReturn(0);
}
213 
214 #undef __FUNCT__
215 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
216 /*@
217     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
218        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
219        and VecSetLocalToGlobalMappingBlock().
220 
221     Not Collective, but communicator may have more than one process
222 
223     Input Parameters:
224 +    inmap - original point-wise mapping
225 -    bs - block size
226 
227     Output Parameter:
228 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
229 
230     Level: advanced
231 
232     Concepts: mapping^local to global
233 
234 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
235 @*/
236 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
237 {
238   PetscErrorCode ierr;
239   PetscInt       *ii,i,n;
240 
241   PetscFunctionBegin;
242   if (bs > 1) {
243     n    = inmap->n/bs;
244     if (n*bs != inmap->n) SETERRQ(PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
245     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
246     for (i=0; i<n; i++) {
247       ii[i] = inmap->indices[bs*i]/bs;
248     }
249     ierr = ISLocalToGlobalMappingCreate(inmap->comm,n,ii,outmap);CHKERRQ(ierr);
250     ierr = PetscFree(ii);CHKERRQ(ierr);
251   } else {
252     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
253     *outmap = inmap;
254   }
255   PetscFunctionReturn(0);
256 }
257 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidPointer(mapping,1);
  /* reference counted: only actually free once the last reference is gone */
  if (--mapping->refct > 0) PetscFunctionReturn(0);
  if (mapping->refct < 0) {
    SETERRQ(PETSC_ERR_PLIB,"Mapping already destroyed");
  }

  /* indices was handed over in CreateNC(); globals is built lazily and may be 0 */
  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
291 /*@
292     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
293     a new index set using the global numbering defined in an ISLocalToGlobalMapping
294     context.
295 
296     Not collective
297 
298     Input Parameters:
299 +   mapping - mapping between local and global numbering
300 -   is - index set in local numbering
301 
302     Output Parameters:
303 .   newis - index set in global numbering
304 
305     Level: advanced
306 
307     Concepts: mapping^local to global
308 
309 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
310           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
311 @*/
312 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
313 {
314   PetscErrorCode ierr;
315   PetscInt            n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;
316 
317   PetscFunctionBegin;
318   PetscValidPointer(mapping,1);
319   PetscValidHeaderSpecific(is,IS_COOKIE,2);
320   PetscValidPointer(newis,3);
321 
322   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
323   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
324   idxmap = mapping->indices;
325 
326   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
327   for (i=0; i<n; i++) {
328     if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
329     idxout[i] = idxmap[idxin[i]];
330   }
331   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
332   ierr = ISCreateGeneralNC(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
333   PetscFunctionReturn(0);
334 }
335 
336 /*MC
337    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
338    and converts them to the global numbering.
339 
340    Not collective
341 
342    Input Parameters:
343 +  mapping - the local to global mapping context
344 .  N - number of integers
345 -  in - input indices in local numbering
346 
347    Output Parameter:
348 .  out - indices in global numbering
349 
350    Synopsis:
351    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
352 
353    Notes:
354    The in and out array parameters may be identical.
355 
356    Level: advanced
357 
358 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
359           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
360           AOPetscToApplication(), ISGlobalToLocalMappingApply()
361 
362     Concepts: mapping^local to global
363 
364 M*/
365 
366 /* -----------------------------------------------------------------------------------------*/
367 
368 #undef __FUNCT__
369 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
370 /*
371     Creates the global fields in the ISLocalToGlobalMapping structure
372 */
373 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
374 {
375   PetscErrorCode ierr;
376   PetscInt            i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
377 
378   PetscFunctionBegin;
379   end   = 0;
380   start = 100000000;
381 
382   for (i=0; i<n; i++) {
383     if (idx[i] < 0) continue;
384     if (idx[i] < start) start = idx[i];
385     if (idx[i] > end)   end   = idx[i];
386   }
387   if (start > end) {start = 0; end = -1;}
388   mapping->globalstart = start;
389   mapping->globalend   = end;
390 
391   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
392   mapping->globals = globals;
393   for (i=0; i<end-start+1; i++) {
394     globals[i] = -1;
395   }
396   for (i=0; i<n; i++) {
397     if (idx[i] < 0) continue;
398     globals[idx[i] - start] = i;
399   }
400 
401   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
402   PetscFunctionReturn(0);
403 }
404 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == PETSC_NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* lazily build the dense global->local lookup table on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as the input; unmappable entries become -1,
       negative inputs are passed through unchanged */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: only mappable entries are emitted, so nout may be < n */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue; /* in range but not owned locally */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass: same filter as above, used to size idxout */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }

  PetscFunctionReturn(0);
}
490 
491 #undef __FUNCT__
492 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
493 /*@C
494     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
495      each index shared by more than one processor
496 
497     Collective on ISLocalToGlobalMapping
498 
499     Input Parameters:
500 .   mapping - the mapping from local to global indexing
501 
502     Output Parameter:
503 +   nproc - number of processors that are connected to this one
504 .   proc - neighboring processors
505 .   numproc - number of indices for each subdomain (processor)
506 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
507 
508     Level: advanced
509 
510     Concepts: mapping^local to global
511 
512     Fortran Usage:
513 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
514 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
515           PetscInt indices[nproc][numprocmax],ierr)
516         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
517         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
518 
519 
520 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
521           ISLocalToGlobalMappingRestoreInfo()
522 @*/
523 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
524 {
525   PetscErrorCode ierr;
526   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
527   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
528   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
529   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
530   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
531   PetscInt       first_procs,first_numprocs,*first_indices;
532   MPI_Request    *recv_waits,*send_waits;
533   MPI_Status     recv_status,*send_status,*recv_statuses;
534   MPI_Comm       comm = mapping->comm;
535   PetscTruth     debug = PETSC_FALSE;
536 
537   PetscFunctionBegin;
538   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
539   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
540   if (size == 1) {
541     *nproc         = 0;
542     *procs         = PETSC_NULL;
543     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
544     (*numprocs)[0] = 0;
545     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
546     (*indices)[0]  = PETSC_NULL;
547     PetscFunctionReturn(0);
548   }
549 
550   ierr = PetscOptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);
551 
552   /*
553     Notes on ISLocalToGlobalMappingGetInfo
554 
555     globally owned node - the nodes that have been assigned to this processor in global
556            numbering, just for this routine.
557 
558     nontrivial globally owned node - node assigned to this processor that is on a subdomain
559            boundary (i.e. is has more than one local owner)
560 
561     locally owned node - node that exists on this processors subdomain
562 
563     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
564            local subdomain
565   */
566   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
567   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
568   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
569 
570   for (i=0; i<n; i++) {
571     if (lindices[i] > max) max = lindices[i];
572   }
573   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
574   Ng++;
575   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
576   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
577   scale  = Ng/size + 1;
578   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
579   rstart = scale*rank;
580 
581   /* determine ownership ranges of global indices */
582   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
583   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
584 
585   /* determine owners of each local node  */
586   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
587   for (i=0; i<n; i++) {
588     proc             = lindices[i]/scale; /* processor that globally owns this index */
589     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
590     owner[i]         = proc;
591     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
592   }
593   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
594   ierr = PetscInfo1(0,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
595 
596   /* inform other processors of number of messages and max length*/
597   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
598   ierr = PetscInfo1(0,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
599 
600   /* post receives for owned rows */
601   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
602   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
603   for (i=0; i<nrecvs; i++) {
604     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
605   }
606 
607   /* pack messages containing lists of local nodes to owners */
608   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
609   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
610   starts[0]  = 0;
611   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
612   for (i=0; i<n; i++) {
613     sends[starts[owner[i]]++] = lindices[i];
614     sends[starts[owner[i]]++] = i;
615   }
616   ierr = PetscFree(owner);CHKERRQ(ierr);
617   starts[0]  = 0;
618   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
619 
620   /* send the messages */
621   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
622   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
623   cnt = 0;
624   for (i=0; i<size; i++) {
625     if (nprocs[2*i]) {
626       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
627       dest[cnt] = i;
628       cnt++;
629     }
630   }
631   ierr = PetscFree(starts);CHKERRQ(ierr);
632 
633   /* wait on receives */
634   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
635   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
636   cnt  = nrecvs;
637   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
638   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
639   while (cnt) {
640     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
641     /* unpack receives into our local space */
642     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
643     source[imdex]  = recv_status.MPI_SOURCE;
644     len[imdex]     = len[imdex]/2;
645     /* count how many local owners for each of my global owned indices */
646     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
647     cnt--;
648   }
649   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
650 
651   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
652   nowned  = 0;
653   nownedm = 0;
654   for (i=0; i<ng; i++) {
655     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
656   }
657 
658   /* create single array to contain rank of all local owners of each globally owned index */
659   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
660   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
661   starts[0] = 0;
662   for (i=1; i<ng; i++) {
663     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
664     else starts[i] = starts[i-1];
665   }
666 
667   /* for each nontrival globally owned node list all arriving processors */
668   for (i=0; i<nrecvs; i++) {
669     for (j=0; j<len[i]; j++) {
670       node = recvs[2*i*nmax+2*j]-rstart;
671       if (nownedsenders[node] > 1) {
672         ownedsenders[starts[node]++] = source[i];
673       }
674     }
675   }
676 
677   if (debug) { /* -----------------------------------  */
678     starts[0]    = 0;
679     for (i=1; i<ng; i++) {
680       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
681       else starts[i] = starts[i-1];
682     }
683     for (i=0; i<ng; i++) {
684       if (nownedsenders[i] > 1) {
685         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
686         for (j=0; j<nownedsenders[i]; j++) {
687           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
688         }
689         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
690       }
691     }
692     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
693   }/* -----------------------------------  */
694 
695   /* wait on original sends */
696   if (nsends) {
697     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
698     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
699     ierr = PetscFree(send_status);CHKERRQ(ierr);
700   }
701   ierr = PetscFree(send_waits);CHKERRQ(ierr);
702   ierr = PetscFree(sends);CHKERRQ(ierr);
703   ierr = PetscFree(nprocs);CHKERRQ(ierr);
704 
705   /* pack messages to send back to local owners */
706   starts[0]    = 0;
707   for (i=1; i<ng; i++) {
708     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
709     else starts[i] = starts[i-1];
710   }
711   nsends2 = nrecvs;
712   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
713   for (i=0; i<nrecvs; i++) {
714     nprocs[i] = 1;
715     for (j=0; j<len[i]; j++) {
716       node = recvs[2*i*nmax+2*j]-rstart;
717       if (nownedsenders[node] > 1) {
718         nprocs[i] += 2 + nownedsenders[node];
719       }
720     }
721   }
722   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
723   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
724   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
725   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
726   /*
727      Each message is 1 + nprocs[i] long, and consists of
728        (0) the number of nodes being sent back
729        (1) the local node number,
730        (2) the number of processors sharing it,
731        (3) the processors sharing it
732   */
733   for (i=0; i<nsends2; i++) {
734     cnt = 1;
735     sends2[starts2[i]] = 0;
736     for (j=0; j<len[i]; j++) {
737       node = recvs[2*i*nmax+2*j]-rstart;
738       if (nownedsenders[node] > 1) {
739         sends2[starts2[i]]++;
740         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
741         sends2[starts2[i]+cnt++] = nownedsenders[node];
742         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
743         cnt += nownedsenders[node];
744       }
745     }
746   }
747 
748   /* receive the message lengths */
749   nrecvs2 = nsends;
750   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
751   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
752   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
753   for (i=0; i<nrecvs2; i++) {
754     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
755   }
756 
757   /* send the message lengths */
758   for (i=0; i<nsends2; i++) {
759     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
760   }
761 
762   /* wait on receives of lens */
763   if (nrecvs2) {
764     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
765     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
766     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
767   }
768   ierr = PetscFree(recv_waits);
769 
770   starts3[0] = 0;
771   nt         = 0;
772   for (i=0; i<nrecvs2-1; i++) {
773     starts3[i+1] = starts3[i] + lens2[i];
774     nt          += lens2[i];
775   }
776   nt += lens2[nrecvs2-1];
777 
778   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
779   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
780   for (i=0; i<nrecvs2; i++) {
781     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
782   }
783 
784   /* send the messages */
785   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
786   for (i=0; i<nsends2; i++) {
787     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
788   }
789 
790   /* wait on receives */
791   if (nrecvs2) {
792     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
793     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
794     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
795   }
796   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
797   ierr = PetscFree(nprocs);CHKERRQ(ierr);
798 
799   if (debug) { /* -----------------------------------  */
800     cnt = 0;
801     for (i=0; i<nrecvs2; i++) {
802       nt = recvs2[cnt++];
803       for (j=0; j<nt; j++) {
804         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
805         for (k=0; k<recvs2[cnt+1]; k++) {
806           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
807         }
808         cnt += 2 + recvs2[cnt+1];
809         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
810       }
811     }
812     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
813   } /* -----------------------------------  */
814 
815   /* count number subdomains for each local node */
816   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
817   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
818   cnt  = 0;
819   for (i=0; i<nrecvs2; i++) {
820     nt = recvs2[cnt++];
821     for (j=0; j<nt; j++) {
822       for (k=0; k<recvs2[cnt+1]; k++) {
823         nprocs[recvs2[cnt+2+k]]++;
824       }
825       cnt += 2 + recvs2[cnt+1];
826     }
827   }
828   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
829   *nproc    = nt;
830   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
831   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
832   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
833   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
834   cnt       = 0;
835   for (i=0; i<size; i++) {
836     if (nprocs[i] > 0) {
837       bprocs[i]        = cnt;
838       (*procs)[cnt]    = i;
839       (*numprocs)[cnt] = nprocs[i];
840       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
841       cnt++;
842     }
843   }
844 
845   /* make the list of subdomains for each nontrivial local node */
846   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
847   cnt  = 0;
848   for (i=0; i<nrecvs2; i++) {
849     nt = recvs2[cnt++];
850     for (j=0; j<nt; j++) {
851       for (k=0; k<recvs2[cnt+1]; k++) {
852         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
853       }
854       cnt += 2 + recvs2[cnt+1];
855     }
856   }
857   ierr = PetscFree(bprocs);CHKERRQ(ierr);
858   ierr = PetscFree(recvs2);CHKERRQ(ierr);
859 
860   /* sort the node indexing by their global numbers */
861   nt = *nproc;
862   for (i=0; i<nt; i++) {
863     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
864     for (j=0; j<(*numprocs)[i]; j++) {
865       tmp[j] = lindices[(*indices)[i][j]];
866     }
867     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
868     ierr = PetscFree(tmp);CHKERRQ(ierr);
869   }
870 
871   if (debug) { /* -----------------------------------  */
872     nt = *nproc;
873     for (i=0; i<nt; i++) {
874       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
875       for (j=0; j<(*numprocs)[i]; j++) {
876         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
877       }
878       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
879     }
880     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
881   } /* -----------------------------------  */
882 
883   /* wait on sends */
884   if (nsends2) {
885     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
886     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
887     ierr = PetscFree(send_status);CHKERRQ(ierr);
888   }
889 
890   ierr = PetscFree(starts3);CHKERRQ(ierr);
891   ierr = PetscFree(dest);CHKERRQ(ierr);
892   ierr = PetscFree(send_waits);CHKERRQ(ierr);
893 
894   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
895   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
896   ierr = PetscFree(starts);CHKERRQ(ierr);
897   ierr = PetscFree(starts2);CHKERRQ(ierr);
898   ierr = PetscFree(lens2);CHKERRQ(ierr);
899 
900   ierr = PetscFree(source);CHKERRQ(ierr);
901   ierr = PetscFree(len);CHKERRQ(ierr);
902   ierr = PetscFree(recvs);CHKERRQ(ierr);
903   ierr = PetscFree(nprocs);CHKERRQ(ierr);
904   ierr = PetscFree(sends2);CHKERRQ(ierr);
905 
906   /* put the information about myself as the first entry in the list */
907   first_procs    = (*procs)[0];
908   first_numprocs = (*numprocs)[0];
909   first_indices  = (*indices)[0];
910   for (i=0; i<*nproc; i++) {
911     if ((*procs)[i] == rank) {
912       (*procs)[0]    = (*procs)[i];
913       (*numprocs)[0] = (*numprocs)[i];
914       (*indices)[0]  = (*indices)[i];
915       (*procs)[i]    = first_procs;
916       (*numprocs)[i] = first_numprocs;
917       (*indices)[i]  = first_indices;
918       break;
919     }
920   }
921   PetscFunctionReturn(0);
922 }
923 
924 #undef __FUNCT__
925 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
926 /*@C
927     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
928 
929     Collective on ISLocalToGlobalMapping
930 
931     Input Parameters:
932 .   mapping - the mapping from local to global indexing
933 
934     Output Parameter:
935 +   nproc - number of processors that are connected to this one
936 .   proc - neighboring processors
937 .   numproc - number of indices for each processor
938 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
939 
940     Level: advanced
941 
942 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
943           ISLocalToGlobalMappingGetInfo()
944 @*/
945 PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
946 {
947   PetscErrorCode ierr;
948   PetscInt i;
949 
950   PetscFunctionBegin;
951   ierr = PetscFree(*procs);CHKERRQ(ierr);
952   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
953   if (*indices) {
954     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
955     for (i=1; i<*nproc; i++) {
956       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
957     }
958     ierr = PetscFree(*indices);CHKERRQ(ierr);
959   }
960   PetscFunctionReturn(0);
961 }
962 
963 
964 
965 
966 
967 
968 
969 
970 
971 
972 
973 
974 
975 
976 
977 
978 
979 
980