xref: /petsc/src/vec/is/utils/isltog.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
1 
2 #include "petscvec.h"   /*I "petscvec.h" I*/
3 #include "private/isimpl.h"    /*I "petscis.h"  I*/
4 
5 PetscClassId  IS_LTOGM_CLASSID;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "ISLocalToGlobalMappingGetSize"
9 /*@C
10     ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.
11 
12     Not Collective
13 
14     Input Parameter:
15 .   ltog - local to global mapping
16 
17     Output Parameter:
18 .   n - the number of entries in the local mapping
19 
20     Level: advanced
21 
22     Concepts: mapping^local to global
23 
24 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
25 @*/
26 PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
27 {
28   PetscFunctionBegin;
29   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
30   PetscValidIntPointer(n,2);
31   *n = mapping->n;
32   PetscFunctionReturn(0);
33 }
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "ISLocalToGlobalMappingView"
37 /*@C
38     ISLocalToGlobalMappingView - View a local to global mapping
39 
40     Not Collective
41 
42     Input Parameters:
43 +   ltog - local to global mapping
44 -   viewer - viewer
45 
46     Level: advanced
47 
48     Concepts: mapping^local to global
49 
50 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
51 @*/
52 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
53 {
54   PetscInt        i;
55   PetscMPIInt     rank;
56   PetscBool       iascii;
57   PetscErrorCode  ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
61   if (!viewer) {
62     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
63   }
64   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
65 
66   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
67   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
68   if (iascii) {
69     for (i=0; i<mapping->n; i++) {
70       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
71     }
72     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
73   } else {
74     SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
75   }
76 
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
82 /*@
83     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
84     ordering and a global parallel ordering.
85 
86     Not collective
87 
88     Input Parameter:
89 .   is - index set containing the global numbers for each local number
90 
91     Output Parameter:
92 .   mapping - new mapping data structure
93 
94     Level: advanced
95 
96     Concepts: mapping^local to global
97 
98 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
99 @*/
100 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
101 {
102   PetscErrorCode ierr;
103   PetscInt       n;
104   const PetscInt *indices;
105   MPI_Comm       comm;
106 
107   PetscFunctionBegin;
108   PetscValidHeaderSpecific(is,IS_CLASSID,1);
109   PetscValidPointer(mapping,2);
110 
111   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
112   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
113   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
114   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
115   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
116   PetscFunctionReturn(0);
117 }
118 
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
122 /*@
123     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
124     ordering and a global parallel ordering.
125 
126     Not Collective, but communicator may have more than one process
127 
128     Input Parameters:
129 +   comm - MPI communicator
130 .   n - the number of local elements
131 .   indices - the global index for each local element
132 -   mode - see PetscCopyMode
133 
134     Output Parameter:
135 .   mapping - new mapping data structure
136 
137     Level: advanced
138 
139     Concepts: mapping^local to global
140 
141 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
142 @*/
143 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
144 {
145   PetscErrorCode ierr;
146   PetscInt       *in;
147 
148   PetscFunctionBegin;
149   if (n) PetscValidIntPointer(indices,3);
150   PetscValidPointer(mapping,4);
151 
152   *mapping = PETSC_NULL;
153 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
154   ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
155 #endif
156 
157   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping",
158 			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
159   (*mapping)->n       = n;
160   /*
161     Do not create the global to local mapping. This is only created if
162     ISGlobalToLocalMapping() is called
163   */
164   (*mapping)->globals = 0;
165   if (mode == PETSC_COPY_VALUES) {
166     ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
167     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
168     ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
169     (*mapping)->indices = in;
170   } else if (mode == PETSC_OWN_POINTER) {
171     (*mapping)->indices = (PetscInt*)indices;
172   } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
178 /*@
179     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
180        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
181        and VecSetLocalToGlobalMappingBlock().
182 
183     Not Collective, but communicator may have more than one process
184 
185     Input Parameters:
186 +    inmap - original point-wise mapping
187 -    bs - block size
188 
189     Output Parameter:
190 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
191 
192     Level: advanced
193 
194     Concepts: mapping^local to global
195 
196 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
197 @*/
198 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
199 {
200   PetscErrorCode ierr;
201   PetscInt       *ii,i,n;
202 
203   PetscFunctionBegin;
204   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
205   PetscValidPointer(outmap,3);
206   if (bs > 1) {
207     n    = inmap->n/bs;
208     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
209     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
210     for (i=0; i<n; i++) {
211       ii[i] = inmap->indices[bs*i]/bs;
212     }
213     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
214   } else {
215     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
216     *outmap = inmap;
217   }
218   PetscFunctionReturn(0);
219 }
220 
221 #undef __FUNCT__
222 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
223 /*@
224     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
225        ISLocalToGlobalMapping
226 
227     Not Collective, but communicator may have more than one process
228 
229     Input Parameter:
230 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
231 - bs - block size
232 
233     Output Parameter:
234 .   outmap - pointwise mapping
235 
236     Level: advanced
237 
238     Concepts: mapping^local to global
239 
240 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
241 @*/
242 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
243 {
244   PetscErrorCode ierr;
245   PetscInt       *ii,i,n;
246 
247   PetscFunctionBegin;
248   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
249   PetscValidPointer(outmap,2);
250   if (bs > 1) {
251     n    = inmap->n*bs;
252     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
253     for (i=0; i<n; i++) {
254       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
255     }
256     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
257   } else {
258     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
259     *outmap = inmap;
260   }
261   PetscFunctionReturn(0);
262 }
263 
264 #undef __FUNCT__
265 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
266 /*@
267    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
268    ordering and a global parallel ordering.
269 
270    Note Collective
271 
272    Input Parameters:
273 .  mapping - mapping data structure
274 
275    Level: advanced
276 
277 .seealso: ISLocalToGlobalMappingCreate()
278 @*/
279 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
280 {
281   PetscErrorCode ierr;
282   PetscFunctionBegin;
283   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
284   if (--((PetscObject)mapping)->refct > 0) PetscFunctionReturn(0);
285   ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
286   ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
287   ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
288   PetscFunctionReturn(0);
289 }
290 
291 #undef __FUNCT__
292 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
293 /*@
294     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
295     a new index set using the global numbering defined in an ISLocalToGlobalMapping
296     context.
297 
298     Not collective
299 
300     Input Parameters:
301 +   mapping - mapping between local and global numbering
302 -   is - index set in local numbering
303 
304     Output Parameters:
305 .   newis - index set in global numbering
306 
307     Level: advanced
308 
309     Concepts: mapping^local to global
310 
311 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
312           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
313 @*/
314 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
315 {
316   PetscErrorCode ierr;
317   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
318   const PetscInt *idxin;
319 
320   PetscFunctionBegin;
321   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
322   PetscValidHeaderSpecific(is,IS_CLASSID,2);
323   PetscValidPointer(newis,3);
324 
325   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
326   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
327   idxmap = mapping->indices;
328 
329   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
330   for (i=0; i<n; i++) {
331     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
332     idxout[i] = idxmap[idxin[i]];
333   }
334   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
335   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
336   PetscFunctionReturn(0);
337 }
338 
339 /*MC
340    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
341    and converts them to the global numbering.
342 
343    Synopsis:
344    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
345 
346    Not collective
347 
348    Input Parameters:
349 +  mapping - the local to global mapping context
350 .  N - number of integers
351 -  in - input indices in local numbering
352 
353    Output Parameter:
354 .  out - indices in global numbering
355 
356    Notes:
357    The in and out array parameters may be identical.
358 
359    Level: advanced
360 
361 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
362           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
363           AOPetscToApplication(), ISGlobalToLocalMappingApply()
364 
365     Concepts: mapping^local to global
366 
367 M*/
368 
369 /* -----------------------------------------------------------------------------------------*/
370 
371 #undef __FUNCT__
372 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
373 /*
374     Creates the global fields in the ISLocalToGlobalMapping structure
375 */
376 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
377 {
378   PetscErrorCode ierr;
379   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
380 
381   PetscFunctionBegin;
382   end   = 0;
383   start = 100000000;
384 
385   for (i=0; i<n; i++) {
386     if (idx[i] < 0) continue;
387     if (idx[i] < start) start = idx[i];
388     if (idx[i] > end)   end   = idx[i];
389   }
390   if (start > end) {start = 0; end = -1;}
391   mapping->globalstart = start;
392   mapping->globalend   = end;
393 
394   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
395   mapping->globals = globals;
396   for (i=0; i<end-start+1; i++) {
397     globals[i] = -1;
398   }
399   for (i=0; i<n; i++) {
400     if (idx[i] < 0) continue;
401     globals[idx[i] - start] = i;
402   }
403 
404   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
405   PetscFunctionReturn(0);
406 }
407 
408 #undef __FUNCT__
409 #define __FUNCT__ "ISGlobalToLocalMappingApply"
410 /*@
411     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
412     specified with a global numbering.
413 
414     Not collective
415 
416     Input Parameters:
417 +   mapping - mapping between local and global numbering
418 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
419            IS_GTOLM_DROP - drops the indices with no local value from the output list
420 .   n - number of global indices to map
421 -   idx - global indices to map
422 
423     Output Parameters:
424 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
425 -   idxout - local index of each global index, one must pass in an array long enough
426              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
427              idxout == PETSC_NULL to determine the required length (returned in nout)
428              and then allocate the required space and call ISGlobalToLocalMappingApply()
429              a second time to set the values.
430 
431     Notes:
432     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
433 
434     This is not scalable in memory usage. Each processor requires O(Nglobal) size
435     array to compute these.
436 
437     Level: advanced
438 
439     Concepts: mapping^global to local
440 
441 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
442           ISLocalToGlobalMappingDestroy()
443 @*/
444 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
445                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
446 {
447   PetscInt       i,*globals,nf = 0,tmp,start,end;
448   PetscErrorCode ierr;
449 
450   PetscFunctionBegin;
451   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
452   if (!mapping->globals) {
453     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
454   }
455   globals = mapping->globals;
456   start   = mapping->globalstart;
457   end     = mapping->globalend;
458 
459   if (type == IS_GTOLM_MASK) {
460     if (idxout) {
461       for (i=0; i<n; i++) {
462         if (idx[i] < 0) idxout[i] = idx[i];
463         else if (idx[i] < start) idxout[i] = -1;
464         else if (idx[i] > end)   idxout[i] = -1;
465         else                     idxout[i] = globals[idx[i] - start];
466       }
467     }
468     if (nout) *nout = n;
469   } else {
470     if (idxout) {
471       for (i=0; i<n; i++) {
472         if (idx[i] < 0) continue;
473         if (idx[i] < start) continue;
474         if (idx[i] > end) continue;
475         tmp = globals[idx[i] - start];
476         if (tmp < 0) continue;
477         idxout[nf++] = tmp;
478       }
479     } else {
480       for (i=0; i<n; i++) {
481         if (idx[i] < 0) continue;
482         if (idx[i] < start) continue;
483         if (idx[i] > end) continue;
484         tmp = globals[idx[i] - start];
485         if (tmp < 0) continue;
486         nf++;
487       }
488     }
489     if (nout) *nout = nf;
490   }
491 
492   PetscFunctionReturn(0);
493 }
494 
495 #undef __FUNCT__
496 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
497 /*@C
498     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
499      each index shared by more than one processor
500 
501     Collective on ISLocalToGlobalMapping
502 
503     Input Parameters:
504 .   mapping - the mapping from local to global indexing
505 
506     Output Parameter:
507 +   nproc - number of processors that are connected to this one
508 .   proc - neighboring processors
509 .   numproc - number of indices for each subdomain (processor)
510 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
511 
512     Level: advanced
513 
514     Concepts: mapping^local to global
515 
516     Fortran Usage:
517 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
518 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
519           PetscInt indices[nproc][numprocmax],ierr)
520         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
521         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
522 
523 
524 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
525           ISLocalToGlobalMappingRestoreInfo()
526 @*/
527 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
528 {
529   PetscErrorCode ierr;
530   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
531   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
532   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
533   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
534   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
535   PetscInt       first_procs,first_numprocs,*first_indices;
536   MPI_Request    *recv_waits,*send_waits;
537   MPI_Status     recv_status,*send_status,*recv_statuses;
538   MPI_Comm       comm = ((PetscObject)mapping)->comm;
539   PetscBool      debug = PETSC_FALSE;
540 
541   PetscFunctionBegin;
542   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
543   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
544   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
545   if (size == 1) {
546     *nproc         = 0;
547     *procs         = PETSC_NULL;
548     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
549     (*numprocs)[0] = 0;
550     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
551     (*indices)[0]  = PETSC_NULL;
552     PetscFunctionReturn(0);
553   }
554 
555   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
556 
557   /*
558     Notes on ISLocalToGlobalMappingGetInfo
559 
560     globally owned node - the nodes that have been assigned to this processor in global
561            numbering, just for this routine.
562 
563     nontrivial globally owned node - node assigned to this processor that is on a subdomain
564            boundary (i.e. is has more than one local owner)
565 
566     locally owned node - node that exists on this processors subdomain
567 
568     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
569            local subdomain
570   */
571   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
572   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
573   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
574 
575   for (i=0; i<n; i++) {
576     if (lindices[i] > max) max = lindices[i];
577   }
578   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
579   Ng++;
580   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
581   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
582   scale  = Ng/size + 1;
583   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
584   rstart = scale*rank;
585 
586   /* determine ownership ranges of global indices */
587   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
588   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
589 
590   /* determine owners of each local node  */
591   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
592   for (i=0; i<n; i++) {
593     proc             = lindices[i]/scale; /* processor that globally owns this index */
594     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
595     owner[i]         = proc;
596     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
597   }
598   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
599   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
600 
601   /* inform other processors of number of messages and max length*/
602   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
603   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
604 
605   /* post receives for owned rows */
606   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
607   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
608   for (i=0; i<nrecvs; i++) {
609     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
610   }
611 
612   /* pack messages containing lists of local nodes to owners */
613   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
614   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
615   starts[0]  = 0;
616   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
617   for (i=0; i<n; i++) {
618     sends[starts[owner[i]]++] = lindices[i];
619     sends[starts[owner[i]]++] = i;
620   }
621   ierr = PetscFree(owner);CHKERRQ(ierr);
622   starts[0]  = 0;
623   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
624 
625   /* send the messages */
626   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
627   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
628   cnt = 0;
629   for (i=0; i<size; i++) {
630     if (nprocs[2*i]) {
631       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
632       dest[cnt] = i;
633       cnt++;
634     }
635   }
636   ierr = PetscFree(starts);CHKERRQ(ierr);
637 
638   /* wait on receives */
639   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
640   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
641   cnt  = nrecvs;
642   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
643   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
644   while (cnt) {
645     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
646     /* unpack receives into our local space */
647     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
648     source[imdex]  = recv_status.MPI_SOURCE;
649     len[imdex]     = len[imdex]/2;
650     /* count how many local owners for each of my global owned indices */
651     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
652     cnt--;
653   }
654   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
655 
656   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
657   nowned  = 0;
658   nownedm = 0;
659   for (i=0; i<ng; i++) {
660     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
661   }
662 
663   /* create single array to contain rank of all local owners of each globally owned index */
664   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
665   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
666   starts[0] = 0;
667   for (i=1; i<ng; i++) {
668     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
669     else starts[i] = starts[i-1];
670   }
671 
672   /* for each nontrival globally owned node list all arriving processors */
673   for (i=0; i<nrecvs; i++) {
674     for (j=0; j<len[i]; j++) {
675       node = recvs[2*i*nmax+2*j]-rstart;
676       if (nownedsenders[node] > 1) {
677         ownedsenders[starts[node]++] = source[i];
678       }
679     }
680   }
681 
682   if (debug) { /* -----------------------------------  */
683     starts[0]    = 0;
684     for (i=1; i<ng; i++) {
685       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
686       else starts[i] = starts[i-1];
687     }
688     for (i=0; i<ng; i++) {
689       if (nownedsenders[i] > 1) {
690         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
691         for (j=0; j<nownedsenders[i]; j++) {
692           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
693         }
694         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
695       }
696     }
697     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
698   }/* -----------------------------------  */
699 
700   /* wait on original sends */
701   if (nsends) {
702     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
703     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
704     ierr = PetscFree(send_status);CHKERRQ(ierr);
705   }
706   ierr = PetscFree(send_waits);CHKERRQ(ierr);
707   ierr = PetscFree(sends);CHKERRQ(ierr);
708   ierr = PetscFree(nprocs);CHKERRQ(ierr);
709 
710   /* pack messages to send back to local owners */
711   starts[0]    = 0;
712   for (i=1; i<ng; i++) {
713     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
714     else starts[i] = starts[i-1];
715   }
716   nsends2 = nrecvs;
717   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
718   for (i=0; i<nrecvs; i++) {
719     nprocs[i] = 1;
720     for (j=0; j<len[i]; j++) {
721       node = recvs[2*i*nmax+2*j]-rstart;
722       if (nownedsenders[node] > 1) {
723         nprocs[i] += 2 + nownedsenders[node];
724       }
725     }
726   }
727   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
728   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
729   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
730   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
731   /*
732      Each message is 1 + nprocs[i] long, and consists of
733        (0) the number of nodes being sent back
734        (1) the local node number,
735        (2) the number of processors sharing it,
736        (3) the processors sharing it
737   */
738   for (i=0; i<nsends2; i++) {
739     cnt = 1;
740     sends2[starts2[i]] = 0;
741     for (j=0; j<len[i]; j++) {
742       node = recvs[2*i*nmax+2*j]-rstart;
743       if (nownedsenders[node] > 1) {
744         sends2[starts2[i]]++;
745         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
746         sends2[starts2[i]+cnt++] = nownedsenders[node];
747         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
748         cnt += nownedsenders[node];
749       }
750     }
751   }
752 
753   /* receive the message lengths */
754   nrecvs2 = nsends;
755   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
756   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
757   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
758   for (i=0; i<nrecvs2; i++) {
759     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
760   }
761 
762   /* send the message lengths */
763   for (i=0; i<nsends2; i++) {
764     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
765   }
766 
767   /* wait on receives of lens */
768   if (nrecvs2) {
769     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
770     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
771     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
772   }
773   ierr = PetscFree(recv_waits);
774 
775   starts3[0] = 0;
776   nt         = 0;
777   for (i=0; i<nrecvs2-1; i++) {
778     starts3[i+1] = starts3[i] + lens2[i];
779     nt          += lens2[i];
780   }
781   nt += lens2[nrecvs2-1];
782 
783   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
784   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
785   for (i=0; i<nrecvs2; i++) {
786     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
787   }
788 
789   /* send the messages */
790   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
791   for (i=0; i<nsends2; i++) {
792     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
793   }
794 
795   /* wait on receives */
796   if (nrecvs2) {
797     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
798     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
799     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
800   }
801   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
802   ierr = PetscFree(nprocs);CHKERRQ(ierr);
803 
804   if (debug) { /* -----------------------------------  */
805     cnt = 0;
806     for (i=0; i<nrecvs2; i++) {
807       nt = recvs2[cnt++];
808       for (j=0; j<nt; j++) {
809         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
810         for (k=0; k<recvs2[cnt+1]; k++) {
811           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
812         }
813         cnt += 2 + recvs2[cnt+1];
814         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
815       }
816     }
817     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
818   } /* -----------------------------------  */
819 
820   /* count number subdomains for each local node */
821   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
822   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
823   cnt  = 0;
824   for (i=0; i<nrecvs2; i++) {
825     nt = recvs2[cnt++];
826     for (j=0; j<nt; j++) {
827       for (k=0; k<recvs2[cnt+1]; k++) {
828         nprocs[recvs2[cnt+2+k]]++;
829       }
830       cnt += 2 + recvs2[cnt+1];
831     }
832   }
833   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
834   *nproc    = nt;
835   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
836   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
837   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
838   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
839   cnt       = 0;
840   for (i=0; i<size; i++) {
841     if (nprocs[i] > 0) {
842       bprocs[i]        = cnt;
843       (*procs)[cnt]    = i;
844       (*numprocs)[cnt] = nprocs[i];
845       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
846       cnt++;
847     }
848   }
849 
850   /* make the list of subdomains for each nontrivial local node */
851   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
852   cnt  = 0;
853   for (i=0; i<nrecvs2; i++) {
854     nt = recvs2[cnt++];
855     for (j=0; j<nt; j++) {
856       for (k=0; k<recvs2[cnt+1]; k++) {
857         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
858       }
859       cnt += 2 + recvs2[cnt+1];
860     }
861   }
862   ierr = PetscFree(bprocs);CHKERRQ(ierr);
863   ierr = PetscFree(recvs2);CHKERRQ(ierr);
864 
865   /* sort the node indexing by their global numbers */
866   nt = *nproc;
867   for (i=0; i<nt; i++) {
868     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
869     for (j=0; j<(*numprocs)[i]; j++) {
870       tmp[j] = lindices[(*indices)[i][j]];
871     }
872     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
873     ierr = PetscFree(tmp);CHKERRQ(ierr);
874   }
875 
876   if (debug) { /* -----------------------------------  */
877     nt = *nproc;
878     for (i=0; i<nt; i++) {
879       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
880       for (j=0; j<(*numprocs)[i]; j++) {
881         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
882       }
883       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
884     }
885     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
886   } /* -----------------------------------  */
887 
888   /* wait on sends */
889   if (nsends2) {
890     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
891     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
892     ierr = PetscFree(send_status);CHKERRQ(ierr);
893   }
894 
895   ierr = PetscFree(starts3);CHKERRQ(ierr);
896   ierr = PetscFree(dest);CHKERRQ(ierr);
897   ierr = PetscFree(send_waits);CHKERRQ(ierr);
898 
899   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
900   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
901   ierr = PetscFree(starts);CHKERRQ(ierr);
902   ierr = PetscFree(starts2);CHKERRQ(ierr);
903   ierr = PetscFree(lens2);CHKERRQ(ierr);
904 
905   ierr = PetscFree(source);CHKERRQ(ierr);
906   ierr = PetscFree(len);CHKERRQ(ierr);
907   ierr = PetscFree(recvs);CHKERRQ(ierr);
908   ierr = PetscFree(nprocs);CHKERRQ(ierr);
909   ierr = PetscFree(sends2);CHKERRQ(ierr);
910 
911   /* put the information about myself as the first entry in the list */
912   first_procs    = (*procs)[0];
913   first_numprocs = (*numprocs)[0];
914   first_indices  = (*indices)[0];
915   for (i=0; i<*nproc; i++) {
916     if ((*procs)[i] == rank) {
917       (*procs)[0]    = (*procs)[i];
918       (*numprocs)[0] = (*numprocs)[i];
919       (*indices)[0]  = (*indices)[i];
920       (*procs)[i]    = first_procs;
921       (*numprocs)[i] = first_numprocs;
922       (*indices)[i]  = first_indices;
923       break;
924     }
925   }
926   PetscFunctionReturn(0);
927 }
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
931 /*@C
932     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
933 
934     Collective on ISLocalToGlobalMapping
935 
    Input Parameter:
.   mapping - the mapping from local to global indexing

    Output Parameters:
+   nproc - number of processors that are connected to this one
.   procs - neighboring processors
.   numprocs - number of indices for each neighbor processor
-   indices - indices of local nodes shared with each neighbor (sorted by global numbering)
944 
945     Level: advanced
946 
947 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
948           ISLocalToGlobalMappingGetInfo()
949 @*/
950 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
951 {
952   PetscErrorCode ierr;
953   PetscInt       i;
954 
955   PetscFunctionBegin;
956   ierr = PetscFree(*procs);CHKERRQ(ierr);
957   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
958   if (*indices) {
959     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
960     for (i=1; i<*nproc; i++) {
961       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
962     }
963     ierr = PetscFree(*indices);CHKERRQ(ierr);
964   }
965   PetscFunctionReturn(0);
966 }
967 
968 #undef __FUNCT__
969 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
970 /*@C
971    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
972 
973    Not Collective
974 
975    Input Arguments:
976 . ltog - local to global mapping
977 
978    Output Arguments:
979 . array - array of indices
980 
981    Level: advanced
982 
983 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
984 @*/
985 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
986 {
987 
988   PetscFunctionBegin;
989   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
990   PetscValidPointer(array,2);
991   *array = ltog->indices;
992   PetscFunctionReturn(0);
993 }
994 
995 #undef __FUNCT__
996 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
997 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
999 
1000    Not Collective
1001 
1002    Input Arguments:
1003 + ltog - local to global mapping
1004 - array - array of indices
1005 
1006    Level: advanced
1007 
1008 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1009 @*/
1010 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1011 {
1012 
1013   PetscFunctionBegin;
1014   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1015   PetscValidPointer(array,2);
1016   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1017   *array = PETSC_NULL;
1018   PetscFunctionReturn(0);
1019 }
1020