/*$Id: isltog.c,v 1.49 2000/06/30 03:11:57 bsmith Exp bsmith $*/

#include "petscsys.h"   /*I "petscsys.h" I*/
#include "src/vec/is/isimpl.h"    /*I "petscis.h"  I*/

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingGetSize"></a>*/"ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

.keywords: IS, local-to-global mapping, size

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,int *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE);
  *n = mapping->n;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingView"></a>*/"ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   ltog - local to global mapping
-   viewer - viewer used to display the mapping; if PETSC_NULL is given, the
             standard output viewer for the mapping's communicator is used

    Level: advanced

.keywords: IS, local-to-global mapping, view

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,Viewer viewer)
{
  int        i,ierr,rank;
  PetscTruth isascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE);
  if (!viewer) viewer = VIEWER_STDOUT_(mapping->comm);
  PetscValidHeaderSpecific(viewer,VIEWER_COOKIE);
  PetscCheckSameComm(mapping,viewer);

  ierr = MPI_Comm_rank(mapping->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
  if (isascii) {
    for (i=0; i<mapping->n; i++) {
      ierr = ViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = ViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,1,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  }

  PetscFunctionReturn(0);
}
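
/*
    Example (a minimal usage sketch, not part of the library source): viewing a
    mapping on the standard output viewer.  The index array gidx and its length
    are illustrative values, and VIEWER_STDOUT_WORLD is assumed to be available
    as in other PETSc examples of this vintage.

      ISLocalToGlobalMapping ltog;
      int                    ierr,n = 4,gidx[] = {0,3,9,12};

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,n,gidx,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingView(ltog,VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);

    Passing a null viewer, i.e. ISLocalToGlobalMappingView(ltog,PETSC_NULL), uses
    VIEWER_STDOUT_ on the mapping's communicator, as the routine above shows.
*/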

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingCreateIS"></a>*/"ISLocalToGlobalMappingCreateIS"
/*@C
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global number for each local index

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

.keywords: IS, local-to-global mapping, create

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  int      n,*indices,ierr;
  MPI_Comm comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_COOKIE);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
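
/*
    Example (a usage sketch, not part of the library source): building a mapping
    from an existing IS that lists the global number of each local point.  The
    array gidx is a stand-in for application data.

      IS                     is;
      ISLocalToGlobalMapping ltog;
      int                    ierr,n = 3,gidx[] = {7,2,5};

      ierr = ISCreateGeneral(PETSC_COMM_WORLD,n,gidx,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&ltog);CHKERRQ(ierr);
      ierr = ISDestroy(is);CHKERRQ(ierr);
      (the mapping copies the indices, so the IS may be destroyed immediately)
      ...
      ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);
*/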

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingCreate"></a>*/"ISLocalToGlobalMappingCreate"
/*@C
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
-   indices - the global index for each local element

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

.keywords: IS, local-to-global mapping, create

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
int ISLocalToGlobalMappingCreate(MPI_Comm cm,int n,const int indices[],ISLocalToGlobalMapping *mapping)
{
  int ierr;

  PetscFunctionBegin;
  PetscValidIntPointer(indices);
  PetscValidPointer(mapping);

  PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
                    cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
  PLogObjectCreate(*mapping);
  PLogObjectMemory(*mapping,sizeof(struct _p_ISLocalToGlobalMapping)+n*sizeof(int));

  (*mapping)->n       = n;
  (*mapping)->indices = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ((*mapping)->indices);
  ierr = PetscMemcpy((*mapping)->indices,indices,n*sizeof(int));CHKERRQ(ierr);

  /*
      Do not create the global to local mapping here. It is created only when
     ISGlobalToLocalMappingApply() is first called.
  */
  (*mapping)->globals = 0;
  PetscFunctionReturn(0);
}
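
/*
    Example (a usage sketch under the signature above): creating a mapping directly
    from an array of global indices and querying its size.  The values are
    illustrative only.

      ISLocalToGlobalMapping ltog;
      int                    ierr,nlocal,gidx[] = {0,1,2,5,6};

      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,5,gidx,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetSize(ltog,&nlocal);CHKERRQ(ierr);
      (nlocal is now 5)
      ierr = ISLocalToGlobalMappingDestroy(ltog);CHKERRQ(ierr);

    The indices array is copied into the mapping, so the caller remains responsible
    for freeing gidx if it was dynamically allocated.
*/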

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingDestroy"></a>*/"ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.keywords: IS, local-to-global mapping, destroy

.seealso: ISLocalToGlobalMappingCreate()
@*/
int ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
  int ierr;
  PetscFunctionBegin;
  PetscValidPointer(mapping);
  if (--mapping->refct > 0) PetscFunctionReturn(0);
  if (mapping->refct < 0) {
    SETERRQ(1,1,"Mapping already destroyed");
  }

  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  if (mapping->globals) {ierr = PetscFree(mapping->globals);CHKERRQ(ierr);}
  PLogObjectDestroy(mapping);
  PetscHeaderDestroy(mapping);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingApplyIS"></a>*/"ISLocalToGlobalMappingApplyIS"
/*@
    ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
    a new index set using the global numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
-   is - index set in local numbering

    Output Parameters:
.   newis - index set in global numbering

    Level: advanced

.keywords: IS, local-to-global mapping, apply

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
int ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  int ierr,n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;

  PetscFunctionBegin;
  PetscValidPointer(mapping);
  PetscValidHeaderSpecific(is,IS_COOKIE);
  PetscValidPointer(newis);

  ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  idxmap = mapping->indices;

  idxout = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ(idxout);
  for (i=0; i<n; i++) {
    if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,1,"Local index %d too large %d (max) at %d",idxin[i],Nmax,i);
    idxout[i] = idxmap[idxin[i]];
  }
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
  ierr = PetscFree(idxout);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
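
/*
    Example (a usage sketch): converting an index set given in the local numbering
    into the corresponding global numbering.  ltog is assumed to have been created
    with one of the creation routines above; the local indices are illustrative.

      IS  islocal,isglobal;
      int ierr,lidx[] = {0,2};

      ierr = ISCreateGeneral(PETSC_COMM_SELF,2,lidx,&islocal);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(ltog,islocal,&isglobal);CHKERRQ(ierr);
      ierr = ISDestroy(islocal);CHKERRQ(ierr);
      (use isglobal, then destroy it)
      ierr = ISDestroy(isglobal);CHKERRQ(ierr);
*/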

/*MC
   ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
   and converts them to the global numbering.

   Not collective

   Input Parameters:
+  mapping - the local to global mapping context
.  N - number of integers
-  in - input indices in local numbering

   Output Parameter:
.  out - indices in global numbering

   Synopsis:
   ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])

   Notes:
   The in and out array parameters may be identical.

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
          ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
          AOPetscToApplication(), ISGlobalToLocalMappingApply()

.keywords: local-to-global, mapping, apply

M*/
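
/*
    Example (a usage sketch of the macro documented above, assuming it is used
    like an ordinary error-code-returning routine as the Synopsis suggests):
    mapping a short list of local indices to global indices in place.

      int ierr,idx[] = {0,1,3};

      (idx[] holds local indices on entry)
      ierr = ISLocalToGlobalMappingApply(ltog,3,idx,idx);CHKERRQ(ierr);
      (idx[] now holds the corresponding global indices; the input and output
       arrays may be identical, as the Notes above state)
*/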

/* -----------------------------------------------------------------------------------------*/

#undef __FUNC__
#define __FUNC__ /*<a name="ISGlobalToLocalMappingSetUp_Private"></a>*/"ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure
*/
static int ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  int i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = 100000000;

  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}
  mapping->globalstart = start;
  mapping->globalend   = end;

  globals = mapping->globals = (int*)PetscMalloc((end-start+2)*sizeof(int));CHKPTRQ(mapping->globals);
  for (i=0; i<end-start+1; i++) {
    globals[i] = -1;
  }
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;
  }

  PLogObjectMemory(mapping,(end-start+1)*sizeof(int));
  PetscFunctionReturn(0);
}
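
/*
    Worked example (illustrative only, not part of the library source) of the lookup
    table built above: with local-to-global indices {5,8,6}, globalstart is 5,
    globalend is 8, and globals[] has end-start+1 = 4 entries

        globals[0] = 0    (global index 5 is local index 0)
        globals[1] = 2    (global index 6 is local index 2)
        globals[2] = -1   (global index 7 has no local value)
        globals[3] = 1    (global index 8 is local index 1)

    ISGlobalToLocalMappingApply() can then answer each query with one subtraction
    and one table lookup, at the cost of O(globalend-globalstart) memory per process.
*/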

#undef __FUNC__
#define __FUNC__ /*<a name="ISGlobalToLocalMappingApply"></a>*/"ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == PETSC_NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

.keywords: IS, global-to-local mapping, apply

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
int ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  int n,const int idx[],int *nout,int idxout[])
{
  int i,ierr,*globals,nf = 0,tmp,start,end;

  PetscFunctionBegin;
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }

  PetscFunctionReturn(0);
}
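
/*
    Example (a usage sketch): the two-pass pattern described in the manual page
    above for IS_GTOLM_DROP, first asking only for the count and then for the
    indices.  ltog and the global index values are assumed to come from the
    application.

      int ierr,nout,*lidx,gidx[] = {5,7,8};

      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,&nout,PETSC_NULL);CHKERRQ(ierr);
      lidx = (int*)PetscMalloc((nout+1)*sizeof(int));CHKPTRQ(lidx);
      ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,3,gidx,&nout,lidx);CHKERRQ(ierr);
      (lidx[0..nout-1] now holds the local numbers of those global indices that
       have one; with IS_GTOLM_MASK the output instead has length 3 and unmapped
       entries are set to -1)
      ierr = PetscFree(lidx);CHKERRQ(ierr);
*/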

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingGetInfo"></a>*/"ISLocalToGlobalMappingGetInfo"
/*@C
    ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
    each index shared by more than one processor

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each subdomain (processor)
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.keywords: IS, local-to-global mapping, neighbors

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingRestoreInfo()
@*/
int ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,int *nproc,int **procs,int **numprocs,int ***indices)
{
  int         i,n = mapping->n,ierr,Ng,ng = PETSC_DECIDE,max = 0,*lindices = mapping->indices;
  int         size,rank,*nprocs,*owner,nsends,*sends,j,*starts,*work,nmax,nrecvs,*recvs,proc;
  int         tag,cnt,*len,*source,imdex,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  int         node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  MPI_Request *recv_waits,*send_waits;
  MPI_Status  recv_status,*send_status,*recv_statuses;
  MPI_Comm    comm = mapping->comm;
  PetscTruth  debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = OptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetInfo

    globally owned node - a node that has been assigned to this processor in the global
           numbering, just for this routine.

    nontrivial globally owned node - a node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)

    locally owned node - a node that exists on this processor's subdomain

    nontrivial locally owned node - a node that is not in the interior (i.e. it has more than one
           local subdomain owner)
  */
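
  /*
     Illustration of the terminology above (values are made up): with two subdomains
     where process 0 holds global nodes {0,1,2,3} and process 1 holds {3,4,5},
     node 3 is a nontrivial locally owned node on both processes since it lies on the
     subdomain boundary, while this routine assigns each global number to exactly one
     "global owner" purely to organize the communication below.
  */
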
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPI_Allreduce(&max,&Ng,1,MPI_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  nprocs    = (int*)PetscMalloc((2*size+1)*sizeof(int));CHKPTRQ(nprocs);
  ierr      = PetscMemzero(nprocs,2*size*sizeof(int));CHKERRQ(ierr);

  /* determine owners of each local node  */
  owner    = (int*)PetscMalloc((n+1)*sizeof(int));CHKPTRQ(owner);
  for (i=0; i<n; i++) {
    proc              = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[size+proc] = 1;                 /* processor globally owns at least one of ours */
    owner[i]          = proc;
    nprocs[proc]++;                        /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[size + i];
  PLogInfo(0,"ISLocalToGlobalMappingGetInfo: Number of global owners for my local data %d\n",nsends);

  /* inform other processors of number of messages and max length*/
  work   = (int*)PetscMalloc(2*size*sizeof(int));CHKPTRQ(work);
  ierr   = MPI_Allreduce(nprocs,work,2*size,MPI_INT,PetscMaxSum_Op,comm);CHKERRQ(ierr);
  nmax   = work[rank];
  nrecvs = work[size+rank];
  ierr   = PetscFree(work);CHKERRQ(ierr);
  PLogInfo(0,"ISLocalToGlobalMappingGetInfo: Number of local owners for my global data %d\n",nrecvs);

  /* post receives for owned rows */
  recvs      = (int*)PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(int));CHKPTRQ(recvs);
  recv_waits = (MPI_Request*)PetscMalloc((nrecvs+1)*sizeof(MPI_Request));CHKPTRQ(recv_waits);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPI_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  sends    = (int*)PetscMalloc((2*n+1)*sizeof(int));CHKPTRQ(sends);
  starts   = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(starts);
  starts[0]  = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[i-1];}
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  starts[0]  = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[i-1];}

  /* send the messages */
  send_waits = (MPI_Request*)PetscMalloc((nsends+1)*sizeof(MPI_Request));CHKPTRQ(send_waits);
  dest       = (int*)PetscMalloc((nsends+1)*sizeof(int));CHKPTRQ(dest);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[i],MPI_INT,i,tag,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  source = (int*)PetscMalloc((2*nrecvs+1)*sizeof(int));CHKPTRQ(source);
  len    = source + nrecvs;
  cnt    = nrecvs;
  nownedsenders = (int*)PetscMalloc((ng+1)*sizeof(int));CHKPTRQ(nownedsenders);
  ierr          = PetscMemzero(nownedsenders,ng*sizeof(int));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr           = MPI_Get_count(&recv_status,MPI_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    len[imdex]     = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ownedsenders = (int*)PetscMalloc((nownedm+1)*sizeof(int));CHKPTRQ(ownedsenders);
  starts       = (int*)PetscMalloc((ng+1)*sizeof(int));CHKPTRQ(starts);
  starts[0]    = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        ownedsenders[starts[node]++] = source[i];
      }
    }
  }

  if (debug) { /* -----------------------------------  */
    starts[0]    = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  }/* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    send_status = (MPI_Status*)PetscMalloc(nsends*sizeof(MPI_Status));CHKPTRQ(send_status);
    ierr        = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr        = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  starts[0]    = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  nprocs  = (int*)PetscMalloc((nsends2+1)*sizeof(int));CHKPTRQ(nprocs); /* length of each message */
  cnt    = 0;
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        nprocs[i] += 2 + nownedsenders[node];
      }
    }
  }
  nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
  sends2     = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(sends2);
  starts2    = (int*)PetscMalloc((nsends2+1)*sizeof(int));CHKPTRQ(starts2);
  starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is nprocs[i] integers long and consists of
       (0) the number of nontrivial nodes being sent back,
     followed, for each such node, by
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the list of processors sharing it
  */
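  /*
     For instance (illustrative values only), if this process globally owns two
     nontrivial nodes that arrived from local positions 4 and 9 on the destination
     process, shared by processes {0,2} and {0,1,3} respectively, the message sent
     back is

         2, 4,2,0,2, 9,3,0,1,3

     i.e. the count of nodes, then for each node its local number, the number of
     sharing processes, and the list of those processes.
  */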
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(int));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPI_INT,source[i],tag-1,comm);CHKERRQ(ierr);
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  lens2   = (int*)PetscMalloc((nrecvs2+1)*sizeof(int));CHKPTRQ(lens2);
  starts3 = (int*)PetscMalloc((nrecvs2+1)*sizeof(int));CHKPTRQ(starts3);
  nt      = 0;
  for (i=0; i<nrecvs2; i++) {
    ierr =  MPI_Recv(&lens2[i],1,MPI_INT,dest[i],tag-1,comm,&recv_status);CHKERRQ(ierr);
    nt   += lens2[i];
  }
  starts3[0] = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
  }
  recvs2     = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(recvs2);
  recv_waits = (MPI_Request*)PetscMalloc((nrecvs2+1)*sizeof(MPI_Request));CHKPTRQ(recv_waits);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPI_INT,dest[i],tag-2,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  send_waits = (MPI_Request*)PetscMalloc((nsends2+1)*sizeof(MPI_Request));CHKPTRQ(send_waits);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPI_INT,source[i],tag-2,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  recv_statuses = (MPI_Status*)PetscMalloc((nrecvs2+1)*sizeof(MPI_Status));CHKPTRQ(recv_statuses);
  ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
  ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count the number of subdomains for each local node */
  nprocs = (int*)PetscMalloc(size*sizeof(int));CHKPTRQ(nprocs);
  ierr   = PetscMemzero(nprocs,size*sizeof(int));CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) {
        nprocs[recvs2[cnt+2+k]]++;
      }
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  *procs    = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(*procs);
  *numprocs = (int*)PetscMalloc((nt+1)*sizeof(int));CHKPTRQ(*numprocs);
  *indices  = (int**)PetscMalloc((nt+1)*sizeof(int*));CHKPTRQ(*indices);
  bprocs    = (int*)PetscMalloc(size*sizeof(int));CHKPTRQ(bprocs);
  cnt       = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      (*indices)[cnt]  = (int*)PetscMalloc(nprocs[i]*sizeof(int));CHKPTRQ((*indices)[cnt]);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  ierr = PetscMemzero(*numprocs,nt*sizeof(int));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) {
        (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      }
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    tmp = (int*)PetscMalloc(((*numprocs)[i])*sizeof(int));CHKPTRQ(tmp);
    for (j=0; j<(*numprocs)[i]; j++) {
      tmp[j] = lindices[(*indices)[i][j]];
    }
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    send_status = (MPI_Status*)PetscMalloc(nsends2*sizeof(MPI_Status));CHKPTRQ(send_status);
    ierr        = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr        = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
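
/*
    Example (a usage sketch): querying and releasing the neighbor information for a
    mapping created collectively on PETSC_COMM_WORLD; ltog is assumed to exist.

      int nproc,*procs,*numprocs,**indices,ierr,i;

      ierr = ISLocalToGlobalMappingGetInfo(ltog,&nproc,&procs,&numprocs,&indices);CHKERRQ(ierr);
      for (i=0; i<nproc; i++) {
        (procs[i] is a neighboring process, numprocs[i] is the number of shared
         local nodes, and indices[i][0..numprocs[i]-1] lists those local nodes,
         sorted by their global numbers)
      }
      ierr = ISLocalToGlobalMappingRestoreInfo(ltog,&nproc,&procs,&numprocs,&indices);CHKERRQ(ierr);
*/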

#undef __FUNC__
#define __FUNC__ /*<a name="ISLocalToGlobalMappingRestoreInfo"></a>*/"ISLocalToGlobalMappingRestoreInfo"
/*@C
    ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()

    Collective on ISLocalToGlobalMapping

    Input Parameters:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each processor
-   indices - indices of local nodes shared with neighbor (sorted by global numbering)

    Level: advanced

.keywords: IS, local-to-global mapping, neighbors

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingGetInfo()
@*/
int ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,int *nproc,int **procs,int **numprocs,int ***indices)
{
  int ierr,i;

  PetscFunctionBegin;
  ierr = PetscFree(*procs);CHKERRQ(ierr);
  ierr = PetscFree(*numprocs);CHKERRQ(ierr);
  for (i=0; i<*nproc; i++) {
    ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(*indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}