1 /*$Id: isltog.c,v 1.48 2000/06/26 20:18:29 bsmith Exp bsmith $*/
2 
3 #include "petscsys.h"   /*I "petscsys.h" I*/
4 #include "src/vec/is/isimpl.h"    /*I "petscis.h"  I*/
5 
6 #undef __FUNC__
7 #define __FUNC__ /*<a name="ISLocalToGlobalMappingGetSize"></a>*/"ISLocalToGlobalMappingGetSize"
8 /*@C
9     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
10 
11     Not Collective
12 
13     Input Parameter:
14 .   ltog - local to global mapping
15 
16     Output Parameter:
17 .   n - the number of entries in the local mapping
18 
19     Level: advanced
20 
21 .keywords: IS, local-to-global mapping, create
22 
23 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
24 @*/
25 int ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,int *n)
26 {
27   PetscFunctionBegin;
28   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE);
29   *n = mapping->n;
30   PetscFunctionReturn(0);
31 }
32 
33 #undef __FUNC__
34 #define __FUNC__ /*<a name="ISLocalToGlobalMappingView"></a>*/"ISLocalToGlobalMappingView"
35 /*@C
36     ISLocalToGlobalMappingView - View a local to global mapping
37 
38     Not Collective
39 
40     Input Parameters:
41 +   ltog - local to global mapping
42 -   viewer - viewer used to display the mapping; PETSC_NULL indicates the default standard output viewer
43 
44     Level: advanced
45 
46 .keywords: IS, local-to-global mapping, create
47 
48 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
49 @*/
50 int ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,Viewer viewer)
51 {
52   int        i,ierr,rank;
53   PetscTruth isascii;
54 
55   PetscFunctionBegin;
56   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE);
57   if (!viewer) viewer = VIEWER_STDOUT_(mapping->comm);
58   PetscValidHeaderSpecific(viewer,VIEWER_COOKIE);
59   PetscCheckSameComm(mapping,viewer);
60 
61   ierr = MPI_Comm_rank(mapping->comm,&rank);CHKERRQ(ierr);
62   ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
63   if (isascii) {
64     for (i=0; i<mapping->n; i++) {
65       ierr = ViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
66     }
67     ierr = ViewerFlush(viewer);CHKERRQ(ierr);
68   } else {
69     SETERRQ1(1,1,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
70   }
71 
72   PetscFunctionReturn(0);
73 }
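/*
   Illustrative usage sketch (not part of this file): viewing a mapping on the
   standard output viewer of a communicator. The variable name ltog is made up and
   is assumed to be a mapping created with ISLocalToGlobalMappingCreate().

     ierr = ISLocalToGlobalMappingView(ltog,VIEWER_STDOUT_(PETSC_COMM_WORLD));CHKERRQ(ierr);
*/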
74 
75 #undef __FUNC__
76 #define __FUNC__ /*<a name="ISLocalToGlobalMappingCreateIS"></a>*/"ISLocalToGlobalMappingCreateIS"
77 /*@C
78     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
79     ordering and a global parallel ordering.
80 
81     Not collective
82 
83     Input Parameter:
84 .   is - index set containing the global number for each local index
85 
86     Output Parameter:
87 .   mapping - new mapping data structure
88 
89     Level: advanced
90 
91 .keywords: IS, local-to-global mapping, create
92 
93 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
94 @*/
95 int ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
96 {
97   int      n,*indices,ierr;
98   MPI_Comm comm;
99 
100   PetscFunctionBegin;
101   PetscValidHeaderSpecific(is,IS_COOKIE);
102 
103   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
104   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
105   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
106   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
107   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
108 
109   PetscFunctionReturn(0);
110 }
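/*
   Illustrative usage sketch (not part of this file): building a mapping from an
   index set that lists the global number of each local index. The values and
   variable names are made up. Because the mapping copies the indices, the IS may
   be destroyed once the mapping has been created.

     IS                     is;
     ISLocalToGlobalMapping ltog;
     int                    ierr,globals[3] = {5,8,2};

     ierr = ISCreateGeneral(PETSC_COMM_SELF,3,globals,&is);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);
     ierr = ISDestroy(is);CHKERRQ(ierr);
     (use ltog, then)
     ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
*/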
111 
112 #undef __FUNC__
113 #define __FUNC__ /*<a name="ISLocalToGlobalMappingCreate"></a>*/"ISLocalToGlobalMappingCreate"
114 /*@C
115     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
116     ordering and a global parallel ordering.
117 
118     Not Collective, but communicator may have more than one process
119 
120     Input Parameters:
121 +   comm - MPI communicator
122 .   n - the number of local elements
123 -   indices - the global index for each local element
124 
125     Output Parameter:
126 .   mapping - new mapping data structure
127 
128     Level: advanced
129 
130 .keywords: IS, local-to-global mapping, create
131 
132 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
133 @*/
134 int ISLocalToGlobalMappingCreate(MPI_Comm cm,int n,const int indices[],ISLocalToGlobalMapping *mapping)
135 {
136   int ierr;
137 
138   PetscFunctionBegin;
139   PetscValidIntPointer(indices);
140   PetscValidPointer(mapping);
141 
142   PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
143                     cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
144   PLogObjectCreate(*mapping);
145   PLogObjectMemory(*mapping,sizeof(struct _p_ISLocalToGlobalMapping)+n*sizeof(int));
146 
147   (*mapping)->n       = n;
148   (*mapping)->indices = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ((*mapping)->indices);
149   ierr = PetscMemcpy((*mapping)->indices,indices,n*sizeof(int));CHKERRQ(ierr);
150 
151   /*
152       Do not create the global to local mapping. This is only created if
153      ISGlobalToLocalMappingApply() is called
154   */
155   (*mapping)->globals = 0;
156   PetscFunctionReturn(0);
157 }
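/*
   Illustrative usage sketch (not part of this file): creating a mapping directly
   from an array of global numbers, querying its local size, and destroying it.
   The index values are made up.

     ISLocalToGlobalMapping ltog;
     int                    ierr,n,globals[4] = {0,3,9,12};

     ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,4,globals,&ltog);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr);
     (n is now 4)
     ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
*/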
158 
159 #undef __FUNC__
160 #define __FUNC__ /*<a name="ISLocalToGlobalMappingDestroy"></a>*/"ISLocalToGlobalMappingDestroy"
161 /*@
162    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
163    ordering and a global parallel ordering.
164 
165    Not Collective
166 
167    Input Parameter:
168 .  mapping - mapping data structure
169 
170    Level: advanced
171 
172 .keywords: IS, local-to-global mapping, destroy
173 
174 .seealso: ISLocalToGlobalMappingCreate()
175 @*/
176 int ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
177 {
178   int ierr;
179   PetscFunctionBegin;
180   PetscValidPointer(mapping);
181   if (--mapping->refct > 0) PetscFunctionReturn(0);
182   if (mapping->refct < 0) {
183     SETERRQ(1,1,"Mapping already destroyed");
184   }
185 
186   ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
187   if (mapping->globals) {ierr = PetscFree(mapping->globals);CHKERRQ(ierr);}
188   PLogObjectDestroy(mapping);
189   PetscHeaderDestroy(mapping);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNC__
194 #define __FUNC__ /*<a name="ISLocalToGlobalMappingApplyIS"></a>*/"ISLocalToGlobalMappingApplyIS"
195 /*@
196     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
197     a new index set using the global numbering defined in an ISLocalToGlobalMapping
198     context.
199 
200     Not collective
201 
202     Input Parameters:
203 +   mapping - mapping between local and global numbering
204 -   is - index set in local numbering
205 
206     Output Parameters:
207 .   newis - index set in global numbering
208 
209     Level: advanced
210 
211 .keywords: IS, local-to-global mapping, apply
212 
213 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
214           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
215 @*/
216 int ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
217 {
218   int ierr,n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;
219 
220   PetscFunctionBegin;
221   PetscValidPointer(mapping);
222   PetscValidHeaderSpecific(is,IS_COOKIE);
223   PetscValidPointer(newis);
224 
225   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
226   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
227   idxmap = mapping->indices;
228 
229   idxout = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ(idxout);
230   for (i=0; i<n; i++) {
231     if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,1,"Local index %d too large %d (max) at %d",idxin[i],Nmax,i);
232     idxout[i] = idxmap[idxin[i]];
233   }
234   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
235   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
236   ierr = PetscFree(idxout);CHKERRQ(ierr);
237   PetscFunctionReturn(0);
238 }
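/*
   Illustrative usage sketch (not part of this file): converting an index set given
   in the local numbering into one in the global numbering. The variable names and
   index values are made up; ltog is assumed to be an existing mapping and the
   local indices must all be smaller than its local size.

     IS  islocal,isglobal;
     int ierr,locals[2] = {0,2};

     ierr = ISCreateGeneral(PETSC_COMM_SELF,2,locals,&islocal);CHKERRQ(ierr);
     ierr = ISLocalToGlobalMappingApplyIS(ltog,islocal,&isglobal);CHKERRQ(ierr);
     ierr = ISDestroy(islocal);CHKERRQ(ierr);
     (use isglobal, then)
     ierr = ISDestroy(isglobal);CHKERRQ(ierr);
*/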
239 
240 /*MC
241    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
242    and converts them to the global numbering.
243 
244    Not collective
245 
246    Input Parameters:
247 +  mapping - the local to global mapping context
248 .  N - number of integers
249 -  in - input indices in local numbering
250 
251    Output Parameter:
252 .  out - indices in global numbering
253 
254    Synopsis:
255    ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
256 
257    Notes:
258    The in and out array parameters may be identical.
259 
260    Level: advanced
261 
262 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
263           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
264           AOPetscToApplication(), ISGlobalToLocalMappingApply()
265 
266 .keywords: local-to-global, mapping, apply
267 
268 M*/
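/*
   Illustrative usage sketch (not part of this file): converting a plain array of
   local indices to the global numbering without creating an IS. The arrays and
   their contents are made up; ltog is assumed to be an existing mapping, and the
   in and out arrays could be the same array.

     int ierr,in[3] = {0,1,2},out[3];

     ierr = ISLocalToGlobalMappingApply(ltog,3,in,out);CHKERRQ(ierr);
*/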
269 
270 /* -----------------------------------------------------------------------------------------*/
271 
272 #undef __FUNC__
273 #define __FUNC__ /*<a name="ISGlobalToLocalMappingSetUp_Private"></a>*/"ISGlobalToLocalMappingSetUp_Private"
274 /*
275     Creates the global fields in the ISLocalToGlobalMapping structure
276 */
277 static int ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
278 {
279   int i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
280 
281   PetscFunctionBegin;
282   end   = 0;
283   start = 100000000;
284 
285   for (i=0; i<n; i++) {
286     if (idx[i] < 0) continue;
287     if (idx[i] < start) start = idx[i];
288     if (idx[i] > end)   end   = idx[i];
289   }
290   if (start > end) {start = 0; end = -1;}
291   mapping->globalstart = start;
292   mapping->globalend   = end;
293 
294   globals = mapping->globals = (int*)PetscMalloc((end-start+2)*sizeof(int));CHKPTRQ(mapping->globals);
295   for (i=0; i<end-start+1; i++) {
296     globals[i] = -1;
297   }
298   for (i=0; i<n; i++) {
299     if (idx[i] < 0) continue;
300     globals[idx[i] - start] = i;
301   }
302 
303   PLogObjectMemory(mapping,(end-start+1)*sizeof(int));
304   PetscFunctionReturn(0);
305 }
306 
307 #undef __FUNC__
308 #define __FUNC__ /*<a name="ISGlobalToLocalMappingApply"></a>*/"ISGlobalToLocalMappingApply"
309 /*@
310     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
311     specified with a global numbering.
312 
313     Not collective
314 
315     Input Parameters:
316 +   mapping - mapping between local and global numbering
317 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
318            IS_GTOLM_DROP - drops the indices with no local value from the output list
319 .   n - number of global indices to map
320 -   idx - global indices to map
321 
322     Output Parameters:
323 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
324 -   idxout - local index of each global index, one must pass in an array long enough
325              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
326              idxout == PETSC_NULL to determine the required length (returned in nout)
327              and then allocate the required space and call ISGlobalToLocalMappingApply()
328              a second time to set the values.
329 
330     Notes:
331     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
332 
333     This is not scalable in memory usage. Each processor requires O(Nglobal) size
334     array to compute these.
335 
336     Level: advanced
337 
338 .keywords: IS, global-to-local mapping, apply
339 
340 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
341           ISLocalToGlobalMappingDestroy()
342 @*/
343 int ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
344                                   int n,const int idx[],int *nout,int idxout[])
345 {
346   int i,ierr,*globals,nf = 0,tmp,start,end;
347 
348   PetscFunctionBegin;
349   if (!mapping->globals) {
350     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
351   }
352   globals = mapping->globals;
353   start   = mapping->globalstart;
354   end     = mapping->globalend;
355 
356   if (type == IS_GTOLM_MASK) {
357     if (idxout) {
358       for (i=0; i<n; i++) {
359         if (idx[i] < 0) idxout[i] = idx[i];
360         else if (idx[i] < start) idxout[i] = -1;
361         else if (idx[i] > end)   idxout[i] = -1;
362         else                     idxout[i] = globals[idx[i] - start];
363       }
364     }
365     if (nout) *nout = n;
366   } else {
367     if (idxout) {
368       for (i=0; i<n; i++) {
369         if (idx[i] < 0) continue;
370         if (idx[i] < start) continue;
371         if (idx[i] > end) continue;
372         tmp = globals[idx[i] - start];
373         if (tmp < 0) continue;
374         idxout[nf++] = tmp;
375       }
376     } else {
377       for (i=0; i<n; i++) {
378         if (idx[i] < 0) continue;
379         if (idx[i] < start) continue;
380         if (idx[i] > end) continue;
381         tmp = globals[idx[i] - start];
382         if (tmp < 0) continue;
383         nf++;
384       }
385     }
386     if (nout) *nout = nf;
387   }
388 
389   PetscFunctionReturn(0);
390 }
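/*
   Illustrative usage sketch (not part of this file): the two-pass pattern described
   in the manual page above for IS_GTOLM_DROP, first calling with idxout == PETSC_NULL
   to learn how much space is required. The variable names and global index values
   are made up; ltog is assumed to be an existing mapping.

     int ierr,nout,*local,global[3] = {7,12,30};

     ierr  = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,global,&nout,PETSC_NULL);CHKERRQ(ierr);
     local = (int*)PetscMalloc((nout+1)*sizeof(int));CHKPTRQ(local);
     ierr  = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,global,&nout,local);CHKERRQ(ierr);
     (local[0..nout-1] now holds the local numbers of the global indices that have one)
     ierr  = PetscFree(local);CHKERRQ(ierr);
*/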
391 
392 #undef __FUNC__
393 #define __FUNC__ /*<a name="ISLocalToGlobalMappingGetInfo"></a>*/"ISLocalToGlobalMappingGetInfo"
394 /*@C
395     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
396      each index shared by more than one processor
397 
398     Collective on ISLocalToGlobalMapping
399 
400     Input Parameter:
401 .   mapping - the mapping from local to global indexing
402 
403     Output Parameters:
404 +   nproc - number of processors that are connected to this one
405 .   procs - neighboring processors
406 .   numprocs - number of indices for each subdomain (processor)
407 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
408 
409     Level: advanced
410 
411 .keywords: IS, local-to-global mapping, neighbors
412 
413 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
414           ISLocalToGlobalMappingRestoreInfo()
415 @*/
416 int ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,int *nproc,int **procs,int **numprocs,int ***indices)
417 {
418   int         i,n = mapping->n,ierr,Ng,ng = PETSC_DECIDE,max = 0,*lindices = mapping->indices;
419   int         size,rank,*nprocs,*owner,nsends,*sends,j,*starts,*work,nmax,nrecvs,*recvs,proc;
420   int         tag,cnt,*len,*source,imdex,scale,*ownedsenders,*nownedsenders,rstart,nowned;
421   int         node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
422   MPI_Request *recv_waits,*send_waits;
423   MPI_Status  recv_status,*send_status,*recv_statuses;
424   MPI_Comm    comm = mapping->comm;
425   PetscTruth  debug = PETSC_FALSE;
426 
427   PetscFunctionBegin;
428   ierr = OptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);
429 
430   /*
431     Notes on ISLocalToGlobalMappingGetInfo
432 
433     globally owned node - the nodes that have been assigned to this processor in global
434            numbering, just for this routine.
435 
436     nontrivial globally owned node - node assigned to this processor that is on a subdomain
437            boundary (i.e. it has more than one local owner)
438 
439     locally owned node - node that exists on this processor's subdomain
440 
441     nontrivial locally owned node - node that is not in the interior (i.e. it belongs to
442            more than one local subdomain)
443   */
444   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag);CHKERRQ(ierr);
445 
446   for (i=0; i<n; i++) {
447     if (lindices[i] > max) max = lindices[i];
448   }
449   ierr   = MPI_Allreduce(&max,&Ng,1,MPI_INT,MPI_MAX,comm);CHKERRQ(ierr);
450   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
451   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
452   scale  = Ng/size + 1;
453   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1);
454   rstart = scale*rank;
455 
456   /* determine ownership ranges of global indices */
457   nprocs    = (int*)PetscMalloc((2*size+1)*sizeof(int));CHKPTRQ(nprocs);
458   ierr      = PetscMemzero(nprocs,2*size*sizeof(int));CHKERRQ(ierr);
459 
460   /* determine owners of each local node  */
461   owner    = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ(owner);
462   for (i=0; i<n; i++) {
463     proc              = lindices[i]/scale; /* processor that globally owns this index */
464     nprocs[size+proc] = 1;                 /* processor globally owns at least one of ours */
465     owner[i]          = proc;
466     nprocs[proc]++;                        /* count of how many that processor globally owns of ours */
467   }
468   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[size + i];
469   PLogInfo(0,"ISLocalToGlobalMappingGetInfo: Number of global owners for my local data %d\n",nsends);
470 
471   /* inform other processors of number of messages and max length*/
472   work   = (int*)PetscMalloc(2*size*sizeof(int));CHKPTRQ(work);
473   ierr   = MPI_Allreduce(nprocs,work,2*size,MPI_INT,PetscMaxSum_Op,comm);CHKERRQ(ierr);
474   nmax   = work[rank];
475   nrecvs = work[size+rank];
476   ierr   = PetscFree(work);CHKERRQ(ierr);
477   PLogInfo(0,"ISLocalToGlobalMappingGetInfo: Number of local owners for my global data %d\n",nrecvs);
478 
479   /* post receives for owned rows */
480   recvs      = (int*)PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(int));CHKPTRQ(recvs);
481   recv_waits = (MPI_Request*)PetscMalloc((nrecvs+1)*sizeof(MPI_Request));CHKPTRQ(recv_waits);
482   for (i=0; i<nrecvs; i++) {
483     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPI_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
484   }
485 
486   /* pack messages containing lists of local nodes to owners */
487   sends    = (int*)PetscMalloc((2*n+1)*sizeof(int));CHKPTRQ(sends);
488   starts   = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(starts);
489   starts[0]  = 0;
490   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[i-1];}
491   for (i=0; i<n; i++) {
492     sends[starts[owner[i]]++] = lindices[i];
493     sends[starts[owner[i]]++] = i;
494   }
495   ierr = PetscFree(owner);CHKERRQ(ierr);
496   starts[0]  = 0;
497   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[i-1];}
498 
499   /* send the messages */
500   send_waits = (MPI_Request*)PetscMalloc((nsends+1)*sizeof(MPI_Request));CHKPTRQ(send_waits);
501   dest       = (int*)PetscMalloc((nsends+1)*sizeof(int));CHKPTRQ(dest);
502   cnt = 0;
503   for (i=0; i<size; i++) {
504     if (nprocs[i]) {
505       ierr      = MPI_Isend(sends+starts[i],2*nprocs[i],MPI_INT,i,tag,comm,send_waits+cnt);CHKERRQ(ierr);
506       dest[cnt] = i;
507       cnt++;
508     }
509   }
510   ierr = PetscFree(starts);CHKERRQ(ierr);
511 
512   /* wait on receives */
513   source = (int*)PetscMalloc((2*nrecvs+1)*sizeof(int));CHKPTRQ(source);
514   len    = source + nrecvs;
515   cnt    = nrecvs;
516   nownedsenders = (int*)PetscMalloc((ng+1)*sizeof(int));CHKPTRQ(nownedsenders);
517   ierr          = PetscMemzero(nownedsenders,ng*sizeof(int));CHKERRQ(ierr);
518   while (cnt) {
519     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
520     /* unpack receives into our local space */
521     ierr           = MPI_Get_count(&recv_status,MPI_INT,&len[imdex]);CHKERRQ(ierr);
522     source[imdex]  = recv_status.MPI_SOURCE;
523     len[imdex]     = len[imdex]/2;
524     /* count how many local owners for each of my global owned indices */
525     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
526     cnt--;
527   }
528   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
529 
530   /* count the globally owned indices shared by more than one processor (nowned) and the total number of (index,owner) pairs among them (nownedm) */
531   nowned  = 0;
532   nownedm = 0;
533   for (i=0; i<ng; i++) {
534     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
535   }
536 
537   /* create single array to contain rank of all local owners of each globally owned index */
538   ownedsenders = (int*)PetscMalloc((nownedm+1)*sizeof(int));CHKPTRQ(ownedsenders);
539   starts       = (int*)PetscMalloc((ng+1)*sizeof(int));CHKPTRQ(starts);
540   starts[0]    = 0;
541   for (i=1; i<ng; i++) {
542     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
543     else starts[i] = starts[i-1];
544   }
545 
546   /* for each nontrivial globally owned node list all arriving processors */
547   for (i=0; i<nrecvs; i++) {
548     for (j=0; j<len[i]; j++) {
549       node = recvs[2*i*nmax+2*j]-rstart;
550       if (nownedsenders[node] > 1) {
551         ownedsenders[starts[node]++] = source[i];
552       }
553     }
554   }
555 
556   if (debug) { /* -----------------------------------  */
557     starts[0]    = 0;
558     for (i=1; i<ng; i++) {
559       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
560       else starts[i] = starts[i-1];
561     }
562     for (i=0; i<ng; i++) {
563       if (nownedsenders[i] > 1) {
564         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
565         for (j=0; j<nownedsenders[i]; j++) {
566           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
567         }
568         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
569       }
570     }
571     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
572   }/* -----------------------------------  */
573 
574   /* wait on original sends */
575   if (nsends) {
576     send_status = (MPI_Status*)PetscMalloc(nsends*sizeof(MPI_Status));CHKPTRQ(send_status);
577     ierr        = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
578     ierr        = PetscFree(send_status);CHKERRQ(ierr);
579   }
580   ierr = PetscFree(send_waits);CHKERRQ(ierr);
581   ierr = PetscFree(sends);CHKERRQ(ierr);
582   ierr = PetscFree(nprocs);CHKERRQ(ierr);
583 
584   /* pack messages to send back to local owners */
585   starts[0]    = 0;
586   for (i=1; i<ng; i++) {
587     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
588     else starts[i] = starts[i-1];
589   }
590   nsends2 = nrecvs;
591   nprocs  = (int*)PetscMalloc((nsends2+1)*sizeof(int));CHKPTRQ(nprocs); /* length of each message */
592   cnt    = 0;
593   for (i=0; i<nrecvs; i++) {
594     nprocs[i] = 1;
595     for (j=0; j<len[i]; j++) {
596       node = recvs[2*i*nmax+2*j]-rstart;
597       if (nownedsenders[node] > 1) {
598         nprocs[i] += 2 + nownedsenders[node];
599       }
600     }
601   }
602   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
603   sends2     = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(sends2);
604   starts2    = (int*)PetscMalloc((nsends2+1)*sizeof(int));CHKPTRQ(starts2);
605   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
606   /*
607      Each message is nprocs[i] integers long and consists of
608        (0) the number of nodes being sent back, followed, for each such node, by
609        (1) the local node number,
610        (2) the number of processors sharing it, and
611        (3) the ranks of the processors sharing it
612   */
613   for (i=0; i<nsends2; i++) {
614     cnt = 1;
615     sends2[starts2[i]] = 0;
616     for (j=0; j<len[i]; j++) {
617       node = recvs[2*i*nmax+2*j]-rstart;
618       if (nownedsenders[node] > 1) {
619         sends2[starts2[i]]++;
620         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
621         sends2[starts2[i]+cnt++] = nownedsenders[node];
622         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(int));CHKERRQ(ierr);
623         cnt += nownedsenders[node];
624       }
625     }
626   }
627 
628   /* send the message lengths */
629   for (i=0; i<nsends2; i++) {
630     ierr = MPI_Send(&nprocs[i],1,MPI_INT,source[i],tag-1,comm);CHKERRQ(ierr);
631   }
632 
633   /* receive the message lengths */
634   nrecvs2 = nsends;
635   lens2   = (int*)PetscMalloc((nrecvs2+1)*sizeof(int));CHKPTRQ(lens2);
636   starts3 = (int*)PetscMalloc((nrecvs2+1)*sizeof(int));CHKPTRQ(starts3);
637   nt      = 0;
638   for (i=0; i<nrecvs2; i++) {
639     ierr =  MPI_Recv(&lens2[i],1,MPI_INT,dest[i],tag-1,comm,&recv_status);CHKERRQ(ierr);
640     nt   += lens2[i];
641   }
642   starts3[0] = 0;
643   for (i=0; i<nrecvs2-1; i++) {
644     starts3[i+1] = starts3[i] + lens2[i];
645   }
646   recvs2     = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(recvs2);
647   recv_waits = (MPI_Request*)PetscMalloc((nrecvs2+1)*sizeof(MPI_Request));CHKPTRQ(recv_waits);
648   for (i=0; i<nrecvs2; i++) {
649     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPI_INT,dest[i],tag-2,comm,recv_waits+i);CHKERRQ(ierr);
650   }
651 
652   /* send the messages */
653   send_waits = (MPI_Request*)PetscMalloc((nsends2+1)*sizeof(MPI_Request));CHKPTRQ(send_waits);
654   for (i=0; i<nsends2; i++) {
655     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPI_INT,source[i],tag-2,comm,send_waits+i);CHKERRQ(ierr);
656   }
657 
658   /* wait on receives */
659   recv_statuses = (MPI_Status*)PetscMalloc((nrecvs2+1)*sizeof(MPI_Status));CHKPTRQ(recv_statuses);
660   ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
661   ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
662   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
663   ierr = PetscFree(nprocs);CHKERRQ(ierr);
664 
665   if (debug) { /* -----------------------------------  */
666     cnt = 0;
667     for (i=0; i<nrecvs2; i++) {
668       nt = recvs2[cnt++];
669       for (j=0; j<nt; j++) {
670         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
671         for (k=0; k<recvs2[cnt+1]; k++) {
672           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
673         }
674         cnt += 2 + recvs2[cnt+1];
675         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
676       }
677     }
678     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
679   } /* -----------------------------------  */
680 
681   /* count the number of local nodes shared with each neighboring subdomain */
682   nprocs = (int*)PetscMalloc(size*sizeof(int));CHKPTRQ(nprocs);
683   ierr   = PetscMemzero(nprocs,size*sizeof(int));CHKERRQ(ierr);
684   cnt = 0;
685   for (i=0; i<nrecvs2; i++) {
686     nt = recvs2[cnt++];
687     for (j=0; j<nt; j++) {
688       for (k=0; k<recvs2[cnt+1]; k++) {
689         nprocs[recvs2[cnt+2+k]]++;
690       }
691       cnt += 2 + recvs2[cnt+1];
692     }
693   }
694   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
695   *nproc    = nt;
696   *procs    = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(*procs);
697   *numprocs = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(*numprocs);
698   *indices  = (int**)PetscMalloc((nt+1)*sizeof(int*));CHKPTRQ(*indices);
699   bprocs    = (int*)PetscMalloc(size*sizeof(int));CHKPTRQ(bprocs);
700   cnt       = 0;
701   for (i=0; i<size; i++) {
702     if (nprocs[i] > 0) {
703       bprocs[i]        = cnt;
704       (*procs)[cnt]    = i;
705       (*numprocs)[cnt] = nprocs[i];
706       (*indices)[cnt]  = (int*)PetscMalloc(nprocs[i]*sizeof(int));CHKPTRQ((*indices)[cnt]);
707       cnt++;
708     }
709   }
710 
711   /* for each neighboring subdomain make the list of nontrivial local nodes shared with it */
712   ierr = PetscMemzero(*numprocs,nt*sizeof(int));CHKERRQ(ierr);
713   cnt  = 0;
714   for (i=0; i<nrecvs2; i++) {
715     nt = recvs2[cnt++];
716     for (j=0; j<nt; j++) {
717       for (k=0; k<recvs2[cnt+1]; k++) {
718         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
719       }
720       cnt += 2 + recvs2[cnt+1];
721     }
722   }
723   ierr = PetscFree(bprocs);CHKERRQ(ierr);
724   ierr = PetscFree(recvs2);CHKERRQ(ierr);
725 
726   /* sort each list of shared local nodes by global number */
727   nt = *nproc;
728   for (i=0; i<nt; i++) {
729     tmp = (int*)PetscMalloc(((*numprocs)[i])*sizeof(int));CHKPTRQ(tmp);
730     for (j=0; j<(*numprocs)[i]; j++) {
731       tmp[j] = lindices[(*indices)[i][j]];
732     }
733     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
734     ierr = PetscFree(tmp);CHKERRQ(ierr);
735   }
736 
737   if (debug) { /* -----------------------------------  */
738     nt = *nproc;
739     for (i=0; i<nt; i++) {
740       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
741       for (j=0; j<(*numprocs)[i]; j++) {
742         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
743       }
744       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
745     }
746     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
747   } /* -----------------------------------  */
748 
749   /* wait on sends */
750   if (nsends2) {
751     send_status = (MPI_Status*)PetscMalloc(nsends2*sizeof(MPI_Status));CHKPTRQ(send_status);
752     ierr        = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
753     ierr        = PetscFree(send_status);CHKERRQ(ierr);
754   }
755 
756   ierr = PetscFree(starts3);CHKERRQ(ierr);
757   ierr = PetscFree(dest);CHKERRQ(ierr);
758   ierr = PetscFree(send_waits);CHKERRQ(ierr);
759 
760   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
761   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
762   ierr = PetscFree(starts);CHKERRQ(ierr);
763   ierr = PetscFree(starts2);CHKERRQ(ierr);
764   ierr = PetscFree(lens2);CHKERRQ(ierr);
765 
766   ierr = PetscFree(source);CHKERRQ(ierr);
767   ierr = PetscFree(recvs);CHKERRQ(ierr);
768   ierr = PetscFree(nprocs);CHKERRQ(ierr);
769   ierr = PetscFree(sends2);CHKERRQ(ierr);
770   PetscFunctionReturn(0);
771 }
772 
773 #undef __FUNC__
774 #define __FUNC__ /*<a name="ISLocalToGlobalMappingRestoreInfo"></a>*/"ISLocalToGlobalMappingRestoreInfo"
775 /*@C
776     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
777 
778     Collective on ISLocalToGlobalMapping
779 
780     Input Parameter:
781 .   mapping - the mapping from local to global indexing
782 
783     Output Parameters:
784 +   nproc - number of processors that are connected to this one
785 .   procs - neighboring processors
786 .   numprocs - number of indices for each subdomain (processor)
787 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
788 
789     Level: advanced
790 
791 .keywords: IS, local-to-global mapping, neighbors
792 
793 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
794           ISLocalToGlobalMappingGetInfo()
795 @*/
796 int ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,int *nproc,int **procs,int **numprocs,int ***indices)
797 {
798   int ierr,i;
799 
800   PetscFunctionBegin;
801   ierr = PetscFree(*procs);CHKERRQ(ierr);
802   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
803   for (i=0; i<*nproc; i++) {
804     ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
805   }
806   ierr = PetscFree(*indices);CHKERRQ(ierr);
807   PetscFunctionReturn(0);
808 }
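/*
   Illustrative usage sketch (not part of this file): retrieving, traversing, and
   returning the neighbor information. The variable names are made up; ltog is
   assumed to be an existing mapping, and since the call is collective every
   processor in the mapping's communicator must make it.

     int nproc,*procs,*numprocs,**indices,i,j,ierr;

     ierr = ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&indices);CHKERRQ(ierr);
     for (i=0; i<nproc; i++) {
       for (j=0; j<numprocs[i]; j++) {
         (local node indices[i][j] is shared with processor procs[i])
       }
     }
     ierr = ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&indices);CHKERRQ(ierr);
*/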
809 
810 
811 
812 
813 
814 
815 
816 
817 
818 
819 
820 
821 
822 
823 
824 
825 
826