xref: /petsc/src/vec/is/utils/isltog.c (revision 4c8c32f384357201972f9dd67b0f2e901704fd35)
1 
2 #include "petscsys.h"   /*I "petscsys.h" I*/
3 #include "src/vec/is/isimpl.h"    /*I "petscis.h"  I*/
4 
5 EXTERN PetscErrorCode VecInitializePackage(char *);
6 PetscCookie IS_LTOGM_COOKIE = -1;
7 
8 #undef __FUNCT__
9 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
10 /*@C
11     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
12 
13     Not Collective
14 
15     Input Parameter:
16 .   ltog - local to global mapping
17 
18     Output Parameter:
19 .   n - the number of entries in the local mapping
20 
21     Level: advanced
22 
23     Concepts: mapping^local to global
24 
25 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
26 @*/
PetscErrorCode ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,int *n)
{
  PetscFunctionBegin;
  /* validate both arguments before dereferencing them */
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;  /* number of entries in the local-to-global mapping */
  PetscFunctionReturn(0);
}
35 
36 #undef __FUNCT__
37 #define __FUNCT__ "ISLocalToGlobalMappingView"
38 /*@C
39     ISLocalToGlobalMappingView - View a local to global mapping
40 
41     Not Collective
42 
43     Input Parameters:
44 +   ltog - local to global mapping
-   viewer - viewer used to display the mapping; if PETSC_NULL, the standard output viewer on the mapping's communicator is used
46 
47     Level: advanced
48 
49     Concepts: mapping^local to global
50 
51 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
52 @*/
53 PetscErrorCode ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
54 {
55   int        i,rank;
56   PetscTruth iascii;
57   PetscErrorCode ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
61   if (!viewer) viewer = PETSC_VIEWER_STDOUT_(mapping->comm);
62   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_COOKIE,2);
63 
64   ierr = MPI_Comm_rank(mapping->comm,&rank);CHKERRQ(ierr);
65   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
66   if (iascii) {
67     for (i=0; i<mapping->n; i++) {
68       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
69     }
70     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
71   } else {
72     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
73   }
74 
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@C
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
.   is - index set containing the global numbers for each local index
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   int      n,*indices;
102   MPI_Comm comm;
103 
104   PetscFunctionBegin;
105   PetscValidHeaderSpecific(is,IS_COOKIE,1);
106   PetscValidPointer(mapping,2);
107 
108   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
109   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
110   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
111   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
112   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
113 
114   PetscFunctionReturn(0);
115 }
116 
117 
118 #undef __FUNCT__
119 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
120 /*@C
121     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
122     ordering and a global parallel ordering.
123 
124     Not Collective, but communicator may have more than one process
125 
126     Input Parameters:
127 +   comm - MPI communicator
128 .   n - the number of local elements
129 -   indices - the global index for each local element
130 
131     Output Parameter:
132 .   mapping - new mapping data structure
133 
134     Level: advanced
135 
136     Concepts: mapping^local to global
137 
138 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreateNC()
139 @*/
140 PetscErrorCode ISLocalToGlobalMappingCreate(MPI_Comm cm,int n,const int indices[],ISLocalToGlobalMapping *mapping)
141 {
142   PetscErrorCode ierr;
143   int *in;
144 
145   PetscFunctionBegin;
146   PetscValidIntPointer(indices,3);
147   PetscValidPointer(mapping,4);
148   ierr = PetscMalloc((n+1)*sizeof(int),&in);CHKERRQ(ierr);
149   ierr = PetscMemcpy(in,indices,n*sizeof(int));CHKERRQ(ierr);
150   ierr = ISLocalToGlobalMappingCreateNC(cm,n,in,mapping);CHKERRQ(ierr);
151   PetscFunctionReturn(0);
152 }
153 
154 #undef __FUNCT__
155 #define __FUNCT__ "ISLocalToGlobalMappingCreateNC"
156 /*@C
157     ISLocalToGlobalMappingCreateNC - Creates a mapping between a local (0 to n)
158     ordering and a global parallel ordering.
159 
160     Not Collective, but communicator may have more than one process
161 
162     Input Parameters:
163 +   comm - MPI communicator
164 .   n - the number of local elements
165 -   indices - the global index for each local element
166 
167     Output Parameter:
168 .   mapping - new mapping data structure
169 
170     Level: developer
171 
172     Notes: Does not copy the indices, just keeps the pointer to the indices. The ISLocalToGlobalMappingDestroy()
173     will free the space so it must be obtained with PetscMalloc() and it must not be freed elsewhere.
174 
175     Concepts: mapping^local to global
176 
177 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate()
178 @*/
PetscErrorCode ISLocalToGlobalMappingCreateNC(MPI_Comm cm,int n,const int indices[],ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);
  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif
  /* lazily register the class cookie the first time a mapping is created */
  if (IS_LTOGM_COOKIE == -1) {
    ierr = PetscLogClassRegister(&IS_LTOGM_COOKIE,"IS Local to global mapping");CHKERRQ(ierr);
  }

  PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_COOKIE,0,"ISLocalToGlobalMapping",
                    cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);
  PetscLogObjectCreate(*mapping);
  PetscLogObjectMemory(*mapping,sizeof(struct _p_ISLocalToGlobalMapping)+n*sizeof(int));

  /* "NC" = no copy: the mapping keeps the caller's pointer, so indices must
     come from PetscMalloc(); ISLocalToGlobalMappingDestroy() will free it */
  (*mapping)->n       = n;
  (*mapping)->indices = (int*)indices;

  /*
      Do not create the global to local mapping. This is only created if
     ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  PetscFunctionReturn(0);
}
209 
210 #undef __FUNCT__
211 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
212 /*@C
213     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
214        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
215        and VecSetLocalToGlobalMappingBlock().
216 
217     Not Collective, but communicator may have more than one process
218 
219     Input Parameters:
220 +    inmap - original point-wise mapping
221 -    bs - block size
222 
223     Output Parameter:
224 .   outmap - block based mapping
225 
226     Level: advanced
227 
228     Concepts: mapping^local to global
229 
230 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
231 @*/
232 PetscErrorCode ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,int bs,ISLocalToGlobalMapping *outmap)
233 {
234   PetscErrorCode ierr;
235   int            *ii,i,n;
236 
237   PetscFunctionBegin;
238   if (bs > 1) {
239     n    = inmap->n/bs;
240     ierr = PetscMalloc(n*sizeof(int),&ii);CHKERRQ(ierr);
241     for (i=0; i<n; i++) {
242       ii[i] = inmap->indices[bs*i]/bs;
243     }
244     ierr = ISLocalToGlobalMappingCreate(inmap->comm,n,ii,outmap);CHKERRQ(ierr);
245     ierr = PetscFree(ii);CHKERRQ(ierr);
246   } else {
247     *outmap = inmap;
248     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
249   }
250   PetscFunctionReturn(0);
251 }
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
255 /*@
256    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
257    ordering and a global parallel ordering.
258 
   Not Collective
260 
261    Input Parameters:
262 .  mapping - mapping data structure
263 
264    Level: advanced
265 
266 .seealso: ISLocalToGlobalMappingCreate()
267 @*/
268 PetscErrorCode ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
269 {
270   PetscErrorCode ierr;
271   PetscFunctionBegin;
272   PetscValidPointer(mapping,1);
273   if (--mapping->refct > 0) PetscFunctionReturn(0);
274   if (mapping->refct < 0) {
275     SETERRQ(PETSC_ERR_PLIB,"Mapping already destroyed");
276   }
277 
278   ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
279   if (mapping->globals) {ierr = PetscFree(mapping->globals);CHKERRQ(ierr);}
280   PetscLogObjectDestroy(mapping);
281   PetscHeaderDestroy(mapping);
282   PetscFunctionReturn(0);
283 }
284 
285 #undef __FUNCT__
286 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
287 /*@
288     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
289     a new index set using the global numbering defined in an ISLocalToGlobalMapping
290     context.
291 
292     Not collective
293 
294     Input Parameters:
295 +   mapping - mapping between local and global numbering
296 -   is - index set in local numbering
297 
298     Output Parameters:
299 .   newis - index set in global numbering
300 
301     Level: advanced
302 
303     Concepts: mapping^local to global
304 
305 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
306           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
307 @*/
308 PetscErrorCode ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
309 {
310   PetscErrorCode ierr;
311   int            n,i,*idxin,*idxmap,*idxout,Nmax = mapping->n;
312 
313   PetscFunctionBegin;
314   PetscValidPointer(mapping,1);
315   PetscValidHeaderSpecific(is,IS_COOKIE,2);
316   PetscValidPointer(newis,3);
317 
318   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
319   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
320   idxmap = mapping->indices;
321 
322   ierr = PetscMalloc((n+1)*sizeof(int),&idxout);CHKERRQ(ierr);
323   for (i=0; i<n; i++) {
324     if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
325     idxout[i] = idxmap[idxin[i]];
326   }
327   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
328   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
329   ierr = PetscFree(idxout);CHKERRQ(ierr);
330   PetscFunctionReturn(0);
331 }
332 
333 /*MC
334    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
335    and converts them to the global numbering.
336 
337    Not collective
338 
339    Input Parameters:
340 +  mapping - the local to global mapping context
341 .  N - number of integers
342 -  in - input indices in local numbering
343 
344    Output Parameter:
345 .  out - indices in global numbering
346 
347    Synopsis:
348    int ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
349 
350    Notes:
351    The in and out array parameters may be identical.
352 
353    Level: advanced
354 
355 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
356           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
357           AOPetscToApplication(), ISGlobalToLocalMappingApply()
358 
359     Concepts: mapping^local to global
360 
361 M*/
362 
363 /* -----------------------------------------------------------------------------------------*/
364 
365 #undef __FUNCT__
366 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
367 /*
368     Creates the global fields in the ISLocalToGlobalMapping structure
369 */
370 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
371 {
372   PetscErrorCode ierr;
373   int            i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
374 
375   PetscFunctionBegin;
376   end   = 0;
377   start = 100000000;
378 
379   for (i=0; i<n; i++) {
380     if (idx[i] < 0) continue;
381     if (idx[i] < start) start = idx[i];
382     if (idx[i] > end)   end   = idx[i];
383   }
384   if (start > end) {start = 0; end = -1;}
385   mapping->globalstart = start;
386   mapping->globalend   = end;
387 
388   ierr             = PetscMalloc((end-start+2)*sizeof(int),&globals);CHKERRQ(ierr);
389   mapping->globals = globals;
390   for (i=0; i<end-start+1; i++) {
391     globals[i] = -1;
392   }
393   for (i=0; i<n; i++) {
394     if (idx[i] < 0) continue;
395     globals[idx[i] - start] = i;
396   }
397 
398   PetscLogObjectMemory(mapping,(end-start+1)*sizeof(int));
399   PetscFunctionReturn(0);
400 }
401 
402 #undef __FUNCT__
403 #define __FUNCT__ "ISGlobalToLocalMappingApply"
404 /*@
405     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
406     specified with a global numbering.
407 
408     Not collective
409 
410     Input Parameters:
411 +   mapping - mapping between local and global numbering
412 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
413            IS_GTOLM_DROP - drops the indices with no local value from the output list
414 .   n - number of global indices to map
415 -   idx - global indices to map
416 
417     Output Parameters:
418 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
419 -   idxout - local index of each global index, one must pass in an array long enough
420              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
421              idxout == PETSC_NULL to determine the required length (returned in nout)
422              and then allocate the required space and call ISGlobalToLocalMappingApply()
423              a second time to set the values.
424 
425     Notes:
426     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
427 
428     This is not scalable in memory usage. Each processor requires O(Nglobal) size
429     array to compute these.
430 
431     Level: advanced
432 
433     Concepts: mapping^global to local
434 
435 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
436           ISLocalToGlobalMappingDestroy()
437 @*/
438 PetscErrorCode ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
439                                   int n,const int idx[],int *nout,int idxout[])
440 {
441   int i,*globals,nf = 0,tmp,start,end;
442   PetscErrorCode ierr;
443 
444   PetscFunctionBegin;
445   if (!mapping->globals) {
446     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
447   }
448   globals = mapping->globals;
449   start   = mapping->globalstart;
450   end     = mapping->globalend;
451 
452   if (type == IS_GTOLM_MASK) {
453     if (idxout) {
454       for (i=0; i<n; i++) {
455         if (idx[i] < 0) idxout[i] = idx[i];
456         else if (idx[i] < start) idxout[i] = -1;
457         else if (idx[i] > end)   idxout[i] = -1;
458         else                     idxout[i] = globals[idx[i] - start];
459       }
460     }
461     if (nout) *nout = n;
462   } else {
463     if (idxout) {
464       for (i=0; i<n; i++) {
465         if (idx[i] < 0) continue;
466         if (idx[i] < start) continue;
467         if (idx[i] > end) continue;
468         tmp = globals[idx[i] - start];
469         if (tmp < 0) continue;
470         idxout[nf++] = tmp;
471       }
472     } else {
473       for (i=0; i<n; i++) {
474         if (idx[i] < 0) continue;
475         if (idx[i] < start) continue;
476         if (idx[i] > end) continue;
477         tmp = globals[idx[i] - start];
478         if (tmp < 0) continue;
479         nf++;
480       }
481     }
482     if (nout) *nout = nf;
483   }
484 
485   PetscFunctionReturn(0);
486 }
487 
488 #undef __FUNCT__
489 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
490 /*@C
491     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
492      each index shared by more than one processor
493 
494     Collective on ISLocalToGlobalMapping
495 
496     Input Parameters:
497 .   mapping - the mapping from local to global indexing
498 
499     Output Parameter:
500 +   nproc - number of processors that are connected to this one
501 .   proc - neighboring processors
502 .   numproc - number of indices for each subdomain (processor)
503 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
504 
505     Level: advanced
506 
507     Concepts: mapping^local to global
508 
509 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
510           ISLocalToGlobalMappingRestoreInfo()
511 @*/
PetscErrorCode ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,int *nproc,int *procs[],int *numprocs[],int **indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank,tag1,tag2,tag3;
  int            i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
  int            *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
  int            cnt,*len,*source,imdex,scale,*ownedsenders,*nownedsenders,rstart,nowned;
  int            node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
  int            first_procs,first_numprocs,*first_indices;
  MPI_Request    *recv_waits,*send_waits;
  MPI_Status     recv_status,*send_status,*recv_statuses;
  MPI_Comm       comm = mapping->comm;
  PetscTruth     debug = PETSC_FALSE;

  PetscFunctionBegin;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size == 1) {
    /* uniprocessor: no neighbors; *numprocs and *indices are still allocated
       (length 1) so ISLocalToGlobalMappingRestoreInfo() can free them uniformly */
    *nproc         = 0;
    *procs         = PETSC_NULL;
    ierr           = PetscMalloc(sizeof(int),numprocs);CHKERRQ(ierr);
    (*numprocs)[0] = 0;
    ierr           = PetscMalloc(sizeof(int*),indices);CHKERRQ(ierr);
    (*indices)[0]  = PETSC_NULL;
    PetscFunctionReturn(0);
  }

  ierr = PetscOptionsHasName(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug);CHKERRQ(ierr);

  /*
    Notes on ISLocalToGlobalMappingGetInfo

    globally owned node - the nodes that have been assigned to this processor in global
           numbering, just for this routine.

    nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. is has more than one local owner)

    locally owned node - node that exists on this processors subdomain

    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain
  */
  /* three tags: tag1 = local->global-owner lists, tag2 = reply lengths, tag3 = replies */
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
  ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);

  /* Ng = global number of indices (max index + 1 across all processes) */
  for (i=0; i<n; i++) {
    if (lindices[i] > max) max = lindices[i];
  }
  ierr   = MPI_Allreduce(&max,&Ng,1,MPI_INT,MPI_MAX,comm);CHKERRQ(ierr);
  Ng++;
  ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* block-distribute the global indices: each rank globally "owns" ng indices
     starting at rstart, with the last rank taking the remainder */
  scale  = Ng/size + 1;
  ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
  rstart = scale*rank;

  /* determine ownership ranges of global indices */
  /* nprocs[2*p] = count of our indices globally owned by rank p;
     nprocs[2*p+1] = 1 iff we must message rank p */
  ierr = PetscMalloc((2*size+1)*sizeof(int),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(int));CHKERRQ(ierr);

  /* determine owners of each local node  */
  ierr = PetscMalloc((n+1)*sizeof(int),&owner);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    proc             = lindices[i]/scale; /* processor that globally owns this index */
    nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
    owner[i]         = proc;
    nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
  }
  nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
  PetscLogInfo(0,"ISLocalToGlobalMappingGetInfo: Number of global owners for my local data %d\n",nsends);

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
  PetscLogInfo(0,"ISLocalToGlobalMappingGetInfo: Number of local owners for my global data %d\n",nrecvs);

  /* post receives for owned rows */
  /* each incoming message is up to nmax (global index, local index) pairs */
  ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(int),&recvs);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPI_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* pack messages containing lists of local nodes to owners */
  /* sends is laid out per destination rank; each entry is the pair
     (global index, our local index) */
  ierr       = PetscMalloc((2*n+1)*sizeof(int),&sends);CHKERRQ(ierr);
  ierr       = PetscMalloc((size+1)*sizeof(int),&starts);CHKERRQ(ierr);
  starts[0]  = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
  for (i=0; i<n; i++) {
    sends[starts[owner[i]]++] = lindices[i];
    sends[starts[owner[i]]++] = i;
  }
  ierr = PetscFree(owner);CHKERRQ(ierr);
  /* rebuild starts (the packing loop above advanced them past each segment) */
  starts[0]  = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}

  /* send the messages */
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(int),&dest);CHKERRQ(ierr);
  cnt = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i]) {
      ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPI_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
      dest[cnt] = i;
      cnt++;
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  /* wait on receives */
  /* source[i] = sender of message i, len[i] = number of pairs it contained */
  ierr = PetscMalloc((2*nrecvs+1)*sizeof(int),&source);CHKERRQ(ierr);
  len  = source + nrecvs;
  cnt  = nrecvs;
  ierr = PetscMalloc((ng+1)*sizeof(int),&nownedsenders);CHKERRQ(ierr);
  ierr = PetscMemzero(nownedsenders,ng*sizeof(int));CHKERRQ(ierr);
  while (cnt) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr           = MPI_Get_count(&recv_status,MPI_INT,&len[imdex]);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    len[imdex]     = len[imdex]/2;
    /* count how many local owners for each of my global owned indices */
    for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
    cnt--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
  nowned  = 0;
  nownedm = 0;
  for (i=0; i<ng; i++) {
    if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
  }

  /* create single array to contain rank of all local owners of each globally owned index */
  ierr      = PetscMalloc((nownedm+1)*sizeof(int),&ownedsenders);CHKERRQ(ierr);
  ierr      = PetscMalloc((ng+1)*sizeof(int),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }

  /* for each nontrivial globally owned node list all arriving processors */
  for (i=0; i<nrecvs; i++) {
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        ownedsenders[starts[node]++] = source[i];
      }
    }
  }

  if (debug) { /* -----------------------------------  */
    /* starts was advanced by the listing loop above; rebuild it for printing */
    starts[0]    = 0;
    for (i=1; i<ng; i++) {
      if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
      else starts[i] = starts[i-1];
    }
    for (i=0; i<ng; i++) {
      if (nownedsenders[i] > 1) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
        for (j=0; j<nownedsenders[i]; j++) {
          ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
        }
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  }/* -----------------------------------  */

  /* wait on original sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(sends);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* pack messages to send back to local owners */
  /* starts must again be rebuilt since the listing loop advanced it */
  starts[0]    = 0;
  for (i=1; i<ng; i++) {
    if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
    else starts[i] = starts[i-1];
  }
  nsends2 = nrecvs;
  ierr    = PetscMalloc((nsends2+1)*sizeof(int),&nprocs);CHKERRQ(ierr); /* length of each message */
  for (i=0; i<nrecvs; i++) {
    nprocs[i] = 1;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        nprocs[i] += 2 + nownedsenders[node];
      }
    }
  }
  nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
  ierr = PetscMalloc((nt+1)*sizeof(int),&sends2);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends2+1)*sizeof(int),&starts2);CHKERRQ(ierr);
  starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
  /*
     Each message is 1 + nprocs[i] long, and consists of
       (0) the number of nodes being sent back
       (1) the local node number,
       (2) the number of processors sharing it,
       (3) the processors sharing it
  */
  for (i=0; i<nsends2; i++) {
    cnt = 1;
    sends2[starts2[i]] = 0;
    for (j=0; j<len[i]; j++) {
      node = recvs[2*i*nmax+2*j]-rstart;
      if (nownedsenders[node] > 1) {
        sends2[starts2[i]]++;
        sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
        sends2[starts2[i]+cnt++] = nownedsenders[node];
        ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(int));CHKERRQ(ierr);
        cnt += nownedsenders[node];
      }
    }
  }

  /* send the message lengths */
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Send(&nprocs[i],1,MPI_INT,source[i],tag2,comm);CHKERRQ(ierr);
  }

  /* receive the message lengths */
  nrecvs2 = nsends;
  ierr = PetscMalloc((nrecvs2+1)*sizeof(int),&lens2);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs2+1)*sizeof(int),&starts3);CHKERRQ(ierr);
  nt      = 0;
  for (i=0; i<nrecvs2; i++) {
    ierr =  MPI_Recv(&lens2[i],1,MPI_INT,dest[i],tag2,comm,&recv_status);CHKERRQ(ierr);
    nt   += lens2[i];
  }
  starts3[0] = 0;
  for (i=0; i<nrecvs2-1; i++) {
    starts3[i+1] = starts3[i] + lens2[i];
  }
  ierr = PetscMalloc((nt+1)*sizeof(int),&recvs2);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs2; i++) {
    ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPI_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* send the messages */
  ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  for (i=0; i<nsends2; i++) {
    ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPI_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
  }

  /* wait on receives */
  ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
  ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
  ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  if (debug) { /* -----------------------------------  */
    cnt = 0;
    for (i=0; i<nrecvs2; i++) {
      nt = recvs2[cnt++];
      for (j=0; j<nt; j++) {
        ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
        for (k=0; k<recvs2[cnt+1]; k++) {
          ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
        }
        cnt += 2 + recvs2[cnt+1];
        ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
      }
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* count number subdomains for each local node */
  /* nprocs is reused here: nprocs[p] = number of our shared nodes also on rank p */
  ierr = PetscMalloc(size*sizeof(int),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,size*sizeof(int));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) {
        nprocs[recvs2[cnt+2+k]]++;
      }
      cnt += 2 + recvs2[cnt+1];
    }
  }
  nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
  *nproc    = nt;
  ierr = PetscMalloc((nt+1)*sizeof(int),procs);CHKERRQ(ierr);
  ierr = PetscMalloc((nt+1)*sizeof(int),numprocs);CHKERRQ(ierr);
  ierr = PetscMalloc((nt+1)*sizeof(int*),indices);CHKERRQ(ierr);
  /* bprocs[p] = compressed position of rank p in the *procs output array */
  ierr = PetscMalloc(size*sizeof(int),&bprocs);CHKERRQ(ierr);
  cnt       = 0;
  for (i=0; i<size; i++) {
    if (nprocs[i] > 0) {
      bprocs[i]        = cnt;
      (*procs)[cnt]    = i;
      (*numprocs)[cnt] = nprocs[i];
      ierr             = PetscMalloc(nprocs[i]*sizeof(int),&(*indices)[cnt]);CHKERRQ(ierr);
      cnt++;
    }
  }

  /* make the list of subdomains for each nontrivial local node */
  /* *numprocs is zeroed and reused as a per-subdomain fill cursor */
  ierr = PetscMemzero(*numprocs,nt*sizeof(int));CHKERRQ(ierr);
  cnt  = 0;
  for (i=0; i<nrecvs2; i++) {
    nt = recvs2[cnt++];
    for (j=0; j<nt; j++) {
      for (k=0; k<recvs2[cnt+1]; k++) {
        (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
      }
      cnt += 2 + recvs2[cnt+1];
    }
  }
  ierr = PetscFree(bprocs);CHKERRQ(ierr);
  ierr = PetscFree(recvs2);CHKERRQ(ierr);

  /* sort the node indexing by their global numbers */
  nt = *nproc;
  for (i=0; i<nt; i++) {
    ierr = PetscMalloc(((*numprocs)[i])*sizeof(int),&tmp);CHKERRQ(ierr);
    for (j=0; j<(*numprocs)[i]; j++) {
      tmp[j] = lindices[(*indices)[i][j]];
    }
    ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
    ierr = PetscFree(tmp);CHKERRQ(ierr);
  }

  if (debug) { /* -----------------------------------  */
    nt = *nproc;
    for (i=0; i<nt; i++) {
      ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
      for (j=0; j<(*numprocs)[i]; j++) {
        ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
    }
    ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  } /* -----------------------------------  */

  /* wait on sends */
  if (nsends2) {
    ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }

  ierr = PetscFree(starts3);CHKERRQ(ierr);
  ierr = PetscFree(dest);CHKERRQ(ierr);
  ierr = PetscFree(send_waits);CHKERRQ(ierr);

  ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
  ierr = PetscFree(starts);CHKERRQ(ierr);
  ierr = PetscFree(starts2);CHKERRQ(ierr);
  ierr = PetscFree(lens2);CHKERRQ(ierr);

  ierr = PetscFree(source);CHKERRQ(ierr);
  ierr = PetscFree(recvs);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);
  ierr = PetscFree(sends2);CHKERRQ(ierr);

  /* put the information about myself as the first entry in the list */
  /* NOTE(review): if this rank shares no nodes with itself it never appears in
     *procs and this swap is a no-op — presumably callers rely on entry 0 being
     self only when it exists; confirm against callers */
  first_procs    = (*procs)[0];
  first_numprocs = (*numprocs)[0];
  first_indices  = (*indices)[0];
  for (i=0; i<*nproc; i++) {
    if ((*procs)[i] == rank) {
      (*procs)[0]    = (*procs)[i];
      (*numprocs)[0] = (*numprocs)[i];
      (*indices)[0]  = (*indices)[i];
      (*procs)[i]    = first_procs;
      (*numprocs)[i] = first_numprocs;
      (*indices)[i]  = first_indices;
      break;
    }
  }

  PetscFunctionReturn(0);
}
898 
899 #undef __FUNCT__
900 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
901 /*@C
902     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
903 
904     Collective on ISLocalToGlobalMapping
905 
906     Input Parameters:
907 .   mapping - the mapping from local to global indexing
908 
909     Output Parameter:
910 +   nproc - number of processors that are connected to this one
911 .   proc - neighboring processors
912 .   numproc - number of indices for each processor
913 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
914 
915     Level: advanced
916 
917 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
918           ISLocalToGlobalMappingGetInfo()
919 @*/
920 PetscErrorCode ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,int *nproc,int *procs[],int *numprocs[],int **indices[])
921 {
922   PetscErrorCode ierr;
923   int i;
924 
925   PetscFunctionBegin;
926   if (*procs) {ierr = PetscFree(*procs);CHKERRQ(ierr);}
927   if (*numprocs) {ierr = PetscFree(*numprocs);CHKERRQ(ierr);}
928   if (*indices) {
929     if ((*indices)[0]) {ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);}
930     for (i=1; i<*nproc; i++) {
931       if ((*indices)[i]) {ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);}
932     }
933     ierr = PetscFree(*indices);CHKERRQ(ierr);
934   }
935   PetscFunctionReturn(0);
936 }
937 
938 
939 
940 
941 
942 
943 
944 
945 
946 
947 
948 
949 
950 
951 
952 
953 
954 
955