xref: /petsc/src/vec/is/utils/isltog.c (revision 58a6ce570e2e2bd779d08c606a9a845717091e63)
1 
2 #include <petscvec.h>   /*I "petscvec.h" I*/
3 #include <private/isimpl.h>    /*I "petscis.h"  I*/
4 
5 PetscClassId  IS_LTOGM_CLASSID;
6 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  /* simple accessor: the local length was fixed at ISLocalToGlobalMappingCreate() time */
  *n = mapping->n;
  PetscFunctionReturn(0);
}
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "ISLocalToGlobalMappingView"
37 /*@C
38     ISLocalToGlobalMappingView - View a local to global mapping
39 
40     Not Collective
41 
42     Input Parameters:
43 +   ltog - local to global mapping
44 -   viewer - viewer
45 
46     Level: advanced
47 
48     Concepts: mapping^local to global
49 
50 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
51 @*/
52 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
53 {
54   PetscInt        i;
55   PetscMPIInt     rank;
56   PetscBool       iascii;
57   PetscErrorCode  ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
61   if (!viewer) {
62     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
63   }
64   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
65 
66   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
67   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
68   if (iascii) {
69     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
70     for (i=0; i<mapping->n; i++) {
71       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
72     }
73     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
74     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
75   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
81 /*@
82     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
83     ordering and a global parallel ordering.
84 
85     Not collective
86 
87     Input Parameter:
88 .   is - index set containing the global numbers for each local number
89 
90     Output Parameter:
91 .   mapping - new mapping data structure
92 
93     Level: advanced
94 
95     Concepts: mapping^local to global
96 
97 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
98 @*/
99 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
100 {
101   PetscErrorCode ierr;
102   PetscInt       n;
103   const PetscInt *indices;
104   MPI_Comm       comm;
105 
106   PetscFunctionBegin;
107   PetscValidHeaderSpecific(is,IS_CLASSID,1);
108   PetscValidPointer(mapping,2);
109 
110   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
111   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
112   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
113   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
114   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
115   PetscFunctionReturn(0);
116 }
117 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may legitimately be NULL when the mapping is empty (n == 0) */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n       = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) {
    /* the mapping takes ownership of the caller's array and will PetscFree() it */
    (*mapping)->indices = (PetscInt*)indices;
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
177 /*@
178     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
179        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
180        and VecSetLocalToGlobalMappingBlock().
181 
182     Not Collective, but communicator may have more than one process
183 
184     Input Parameters:
185 +    inmap - original point-wise mapping
186 -    bs - block size
187 
188     Output Parameter:
189 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
190 
191     Level: advanced
192 
193     Concepts: mapping^local to global
194 
195 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
196 @*/
197 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
198 {
199   PetscErrorCode ierr;
200   PetscInt       *ii,i,n;
201 
202   PetscFunctionBegin;
203   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
204   PetscValidPointer(outmap,3);
205   if (bs > 1) {
206     n    = inmap->n/bs;
207     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
208     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
209     for (i=0; i<n; i++) {
210       ii[i] = inmap->indices[bs*i]/bs;
211     }
212     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
213   } else {
214     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
215     *outmap = inmap;
216   }
217   PetscFunctionReturn(0);
218 }
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
222 /*@
223     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
224        ISLocalToGlobalMapping
225 
226     Not Collective, but communicator may have more than one process
227 
228     Input Parameter:
229 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
230 - bs - block size
231 
232     Output Parameter:
233 .   outmap - pointwise mapping
234 
235     Level: advanced
236 
237     Concepts: mapping^local to global
238 
239 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
240 @*/
241 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
242 {
243   PetscErrorCode ierr;
244   PetscInt       *ii,i,n;
245 
246   PetscFunctionBegin;
247   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
248   PetscValidPointer(outmap,2);
249   if (bs > 1) {
250     n    = inmap->n*bs;
251     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
252     for (i=0; i<n; i++) {
253       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
254     }
255     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
256   } else {
257     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
258     *outmap = inmap;
259   }
260   PetscFunctionReturn(0);
261 }
262 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* reference counted: only the last reference actually frees the object */
  if (--((PetscObject)mapping)->refct > 0) PetscFunctionReturn(0);
  ierr = PetscFree(mapping->indices);CHKERRQ(ierr);
  /* globals is allocated lazily by ISGlobalToLocalMappingSetUp_Private();
     freeing it here is a no-op when it was never created */
  ierr = PetscFree(mapping->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
289 
290 #undef __FUNCT__
291 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
292 /*@
293     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
294     a new index set using the global numbering defined in an ISLocalToGlobalMapping
295     context.
296 
297     Not collective
298 
299     Input Parameters:
300 +   mapping - mapping between local and global numbering
301 -   is - index set in local numbering
302 
303     Output Parameters:
304 .   newis - index set in global numbering
305 
306     Level: advanced
307 
308     Concepts: mapping^local to global
309 
310 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
311           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
312 @*/
313 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
314 {
315   PetscErrorCode ierr;
316   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
317   const PetscInt *idxin;
318 
319   PetscFunctionBegin;
320   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
321   PetscValidHeaderSpecific(is,IS_CLASSID,2);
322   PetscValidPointer(newis,3);
323 
324   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
325   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
326   idxmap = mapping->indices;
327 
328   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
329   for (i=0; i<n; i++) {
330     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
331     idxout[i] = idxmap[idxin[i]];
332   }
333   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
334   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
335   PetscFunctionReturn(0);
336 }
337 
338 /*MC
339    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
340    and converts them to the global numbering.
341 
342    Synopsis:
343    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
344 
345    Not collective
346 
347    Input Parameters:
348 +  mapping - the local to global mapping context
349 .  N - number of integers
350 -  in - input indices in local numbering
351 
352    Output Parameter:
353 .  out - indices in global numbering
354 
355    Notes:
356    The in and out array parameters may be identical.
357 
358    Level: advanced
359 
360 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
361           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
362           AOPetscToApplication(), ISGlobalToLocalMappingApply()
363 
364     Concepts: mapping^local to global
365 
366 M*/
367 
368 /* -----------------------------------------------------------------------------------------*/
369 
370 #undef __FUNCT__
371 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
372 /*
373     Creates the global fields in the ISLocalToGlobalMapping structure
374 */
375 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
376 {
377   PetscErrorCode ierr;
378   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
379 
380   PetscFunctionBegin;
381   end   = 0;
382   start = 100000000;
383 
384   for (i=0; i<n; i++) {
385     if (idx[i] < 0) continue;
386     if (idx[i] < start) start = idx[i];
387     if (idx[i] > end)   end   = idx[i];
388   }
389   if (start > end) {start = 0; end = -1;}
390   mapping->globalstart = start;
391   mapping->globalend   = end;
392 
393   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
394   mapping->globals = globals;
395   for (i=0; i<end-start+1; i++) {
396     globals[i] = -1;
397   }
398   for (i=0; i<n; i++) {
399     if (idx[i] < 0) continue;
400     globals[idx[i] - start] = i;
401   }
402 
403   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
404   PetscFunctionReturn(0);
405 }
406 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == PETSC_NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  /* build the dense global->local lookup table lazily, on first use */
  if (!mapping->globals) {
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* MASK: output has the same length as the input; unmapped entries become -1,
       negative inputs pass through unchanged */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* DROP: only successfully mapped indices are emitted, compacted to the front */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == PETSC_NULL): used to size the output array */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }

  PetscFunctionReturn(0);
}
493 
494 #undef __FUNCT__
495 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
496 /*@C
497     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
498      each index shared by more than one processor
499 
500     Collective on ISLocalToGlobalMapping
501 
502     Input Parameters:
503 .   mapping - the mapping from local to global indexing
504 
505     Output Parameter:
506 +   nproc - number of processors that are connected to this one
507 .   proc - neighboring processors
508 .   numproc - number of indices for each subdomain (processor)
509 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
510 
511     Level: advanced
512 
513     Concepts: mapping^local to global
514 
515     Fortran Usage:
516 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
517 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
518           PetscInt indices[nproc][numprocmax],ierr)
519         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
520         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
521 
522 
523 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
524           ISLocalToGlobalMappingRestoreInfo()
525 @*/
526 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
527 {
528   PetscErrorCode ierr;
529   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
530   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
531   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
532   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
533   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
534   PetscInt       first_procs,first_numprocs,*first_indices;
535   MPI_Request    *recv_waits,*send_waits;
536   MPI_Status     recv_status,*send_status,*recv_statuses;
537   MPI_Comm       comm = ((PetscObject)mapping)->comm;
538   PetscBool      debug = PETSC_FALSE;
539 
540   PetscFunctionBegin;
541   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
542   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
543   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
544   if (size == 1) {
545     *nproc         = 0;
546     *procs         = PETSC_NULL;
547     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
548     (*numprocs)[0] = 0;
549     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
550     (*indices)[0]  = PETSC_NULL;
551     PetscFunctionReturn(0);
552   }
553 
554   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
555 
556   /*
557     Notes on ISLocalToGlobalMappingGetInfo
558 
559     globally owned node - the nodes that have been assigned to this processor in global
560            numbering, just for this routine.
561 
562     nontrivial globally owned node - node assigned to this processor that is on a subdomain
           boundary (i.e. it has more than one local owner)
564 
565     locally owned node - node that exists on this processors subdomain
566 
    nontrivial locally owned node - node that is not in the interior (i.e. has more than one
           local subdomain owner)
569   */
570   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
571   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
572   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
573 
574   for (i=0; i<n; i++) {
575     if (lindices[i] > max) max = lindices[i];
576   }
577   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
578   Ng++;
579   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
580   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
581   scale  = Ng/size + 1;
582   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
583   rstart = scale*rank;
584 
585   /* determine ownership ranges of global indices */
586   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
587   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
588 
589   /* determine owners of each local node  */
590   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
591   for (i=0; i<n; i++) {
592     proc             = lindices[i]/scale; /* processor that globally owns this index */
593     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
594     owner[i]         = proc;
595     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
596   }
597   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
598   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
599 
600   /* inform other processors of number of messages and max length*/
601   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
602   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
603 
604   /* post receives for owned rows */
605   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
606   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
607   for (i=0; i<nrecvs; i++) {
608     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
609   }
610 
611   /* pack messages containing lists of local nodes to owners */
612   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
613   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
614   starts[0]  = 0;
615   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
616   for (i=0; i<n; i++) {
617     sends[starts[owner[i]]++] = lindices[i];
618     sends[starts[owner[i]]++] = i;
619   }
620   ierr = PetscFree(owner);CHKERRQ(ierr);
621   starts[0]  = 0;
622   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
623 
624   /* send the messages */
625   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
626   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
627   cnt = 0;
628   for (i=0; i<size; i++) {
629     if (nprocs[2*i]) {
630       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
631       dest[cnt] = i;
632       cnt++;
633     }
634   }
635   ierr = PetscFree(starts);CHKERRQ(ierr);
636 
637   /* wait on receives */
638   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
639   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
640   cnt  = nrecvs;
641   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
642   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
643   while (cnt) {
644     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
645     /* unpack receives into our local space */
646     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
647     source[imdex]  = recv_status.MPI_SOURCE;
648     len[imdex]     = len[imdex]/2;
649     /* count how many local owners for each of my global owned indices */
650     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
651     cnt--;
652   }
653   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
654 
655   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
656   nowned  = 0;
657   nownedm = 0;
658   for (i=0; i<ng; i++) {
659     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
660   }
661 
662   /* create single array to contain rank of all local owners of each globally owned index */
663   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
664   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
665   starts[0] = 0;
666   for (i=1; i<ng; i++) {
667     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
668     else starts[i] = starts[i-1];
669   }
670 
  /* for each nontrivial globally owned node list all arriving processors */
672   for (i=0; i<nrecvs; i++) {
673     for (j=0; j<len[i]; j++) {
674       node = recvs[2*i*nmax+2*j]-rstart;
675       if (nownedsenders[node] > 1) {
676         ownedsenders[starts[node]++] = source[i];
677       }
678     }
679   }
680 
681   if (debug) { /* -----------------------------------  */
682     starts[0]    = 0;
683     for (i=1; i<ng; i++) {
684       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
685       else starts[i] = starts[i-1];
686     }
687     for (i=0; i<ng; i++) {
688       if (nownedsenders[i] > 1) {
689         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
690         for (j=0; j<nownedsenders[i]; j++) {
691           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
692         }
693         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
694       }
695     }
696     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
697   }/* -----------------------------------  */
698 
699   /* wait on original sends */
700   if (nsends) {
701     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
702     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
703     ierr = PetscFree(send_status);CHKERRQ(ierr);
704   }
705   ierr = PetscFree(send_waits);CHKERRQ(ierr);
706   ierr = PetscFree(sends);CHKERRQ(ierr);
707   ierr = PetscFree(nprocs);CHKERRQ(ierr);
708 
709   /* pack messages to send back to local owners */
710   starts[0]    = 0;
711   for (i=1; i<ng; i++) {
712     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
713     else starts[i] = starts[i-1];
714   }
715   nsends2 = nrecvs;
716   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
717   for (i=0; i<nrecvs; i++) {
718     nprocs[i] = 1;
719     for (j=0; j<len[i]; j++) {
720       node = recvs[2*i*nmax+2*j]-rstart;
721       if (nownedsenders[node] > 1) {
722         nprocs[i] += 2 + nownedsenders[node];
723       }
724     }
725   }
726   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
727   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
728   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
729   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
730   /*
731      Each message is 1 + nprocs[i] long, and consists of
732        (0) the number of nodes being sent back
733        (1) the local node number,
734        (2) the number of processors sharing it,
735        (3) the processors sharing it
736   */
737   for (i=0; i<nsends2; i++) {
738     cnt = 1;
739     sends2[starts2[i]] = 0;
740     for (j=0; j<len[i]; j++) {
741       node = recvs[2*i*nmax+2*j]-rstart;
742       if (nownedsenders[node] > 1) {
743         sends2[starts2[i]]++;
744         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
745         sends2[starts2[i]+cnt++] = nownedsenders[node];
746         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
747         cnt += nownedsenders[node];
748       }
749     }
750   }
751 
752   /* receive the message lengths */
753   nrecvs2 = nsends;
754   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
755   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
756   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
757   for (i=0; i<nrecvs2; i++) {
758     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
759   }
760 
761   /* send the message lengths */
762   for (i=0; i<nsends2; i++) {
763     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
764   }
765 
766   /* wait on receives of lens */
767   if (nrecvs2) {
768     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
769     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
770     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
771   }
772   ierr = PetscFree(recv_waits);
773 
774   starts3[0] = 0;
775   nt         = 0;
776   for (i=0; i<nrecvs2-1; i++) {
777     starts3[i+1] = starts3[i] + lens2[i];
778     nt          += lens2[i];
779   }
780   nt += lens2[nrecvs2-1];
781 
782   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
783   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
784   for (i=0; i<nrecvs2; i++) {
785     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
786   }
787 
788   /* send the messages */
789   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
790   for (i=0; i<nsends2; i++) {
791     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
792   }
793 
794   /* wait on receives */
795   if (nrecvs2) {
796     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
797     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
798     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
799   }
800   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
801   ierr = PetscFree(nprocs);CHKERRQ(ierr);
802 
803   if (debug) { /* -----------------------------------  */
804     cnt = 0;
805     for (i=0; i<nrecvs2; i++) {
806       nt = recvs2[cnt++];
807       for (j=0; j<nt; j++) {
808         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
809         for (k=0; k<recvs2[cnt+1]; k++) {
810           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
811         }
812         cnt += 2 + recvs2[cnt+1];
813         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
814       }
815     }
816     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
817   } /* -----------------------------------  */
818 
819   /* count number subdomains for each local node */
820   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
821   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
822   cnt  = 0;
823   for (i=0; i<nrecvs2; i++) {
824     nt = recvs2[cnt++];
825     for (j=0; j<nt; j++) {
826       for (k=0; k<recvs2[cnt+1]; k++) {
827         nprocs[recvs2[cnt+2+k]]++;
828       }
829       cnt += 2 + recvs2[cnt+1];
830     }
831   }
832   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
833   *nproc    = nt;
834   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
835   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
836   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
837   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
838   cnt       = 0;
839   for (i=0; i<size; i++) {
840     if (nprocs[i] > 0) {
841       bprocs[i]        = cnt;
842       (*procs)[cnt]    = i;
843       (*numprocs)[cnt] = nprocs[i];
844       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
845       cnt++;
846     }
847   }
848 
849   /* make the list of subdomains for each nontrivial local node */
850   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
851   cnt  = 0;
852   for (i=0; i<nrecvs2; i++) {
853     nt = recvs2[cnt++];
854     for (j=0; j<nt; j++) {
855       for (k=0; k<recvs2[cnt+1]; k++) {
856         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
857       }
858       cnt += 2 + recvs2[cnt+1];
859     }
860   }
861   ierr = PetscFree(bprocs);CHKERRQ(ierr);
862   ierr = PetscFree(recvs2);CHKERRQ(ierr);
863 
864   /* sort the node indexing by their global numbers */
865   nt = *nproc;
866   for (i=0; i<nt; i++) {
867     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
868     for (j=0; j<(*numprocs)[i]; j++) {
869       tmp[j] = lindices[(*indices)[i][j]];
870     }
871     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
872     ierr = PetscFree(tmp);CHKERRQ(ierr);
873   }
874 
875   if (debug) { /* -----------------------------------  */
876     nt = *nproc;
877     for (i=0; i<nt; i++) {
878       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
879       for (j=0; j<(*numprocs)[i]; j++) {
880         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
881       }
882       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
883     }
884     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
885   } /* -----------------------------------  */
886 
887   /* wait on sends */
888   if (nsends2) {
889     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
890     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
891     ierr = PetscFree(send_status);CHKERRQ(ierr);
892   }
893 
894   ierr = PetscFree(starts3);CHKERRQ(ierr);
895   ierr = PetscFree(dest);CHKERRQ(ierr);
896   ierr = PetscFree(send_waits);CHKERRQ(ierr);
897 
898   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
899   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
900   ierr = PetscFree(starts);CHKERRQ(ierr);
901   ierr = PetscFree(starts2);CHKERRQ(ierr);
902   ierr = PetscFree(lens2);CHKERRQ(ierr);
903 
904   ierr = PetscFree(source);CHKERRQ(ierr);
905   ierr = PetscFree(len);CHKERRQ(ierr);
906   ierr = PetscFree(recvs);CHKERRQ(ierr);
907   ierr = PetscFree(nprocs);CHKERRQ(ierr);
908   ierr = PetscFree(sends2);CHKERRQ(ierr);
909 
910   /* put the information about myself as the first entry in the list */
911   first_procs    = (*procs)[0];
912   first_numprocs = (*numprocs)[0];
913   first_indices  = (*indices)[0];
914   for (i=0; i<*nproc; i++) {
915     if ((*procs)[i] == rank) {
916       (*procs)[0]    = (*procs)[i];
917       (*numprocs)[0] = (*numprocs)[i];
918       (*indices)[0]  = (*indices)[i];
919       (*procs)[i]    = first_procs;
920       (*numprocs)[i] = first_numprocs;
921       (*indices)[i]  = first_indices;
922       break;
923     }
924   }
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
930 /*@C
931     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
932 
933     Collective on ISLocalToGlobalMapping
934 
935     Input Parameters:
936 .   mapping - the mapping from local to global indexing
937 
938     Output Parameter:
939 +   nproc - number of processors that are connected to this one
940 .   proc - neighboring processors
941 .   numproc - number of indices for each processor
942 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
943 
944     Level: advanced
945 
946 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
947           ISLocalToGlobalMappingGetInfo()
948 @*/
949 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
950 {
951   PetscErrorCode ierr;
952   PetscInt       i;
953 
954   PetscFunctionBegin;
955   ierr = PetscFree(*procs);CHKERRQ(ierr);
956   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
957   if (*indices) {
958     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
959     for (i=1; i<*nproc; i++) {
960       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
961     }
962     ierr = PetscFree(*indices);CHKERRQ(ierr);
963   }
964   PetscFunctionReturn(0);
965 }
966 
967 #undef __FUNCT__
968 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
969 /*@C
970    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
971 
972    Not Collective
973 
974    Input Arguments:
975 . ltog - local to global mapping
976 
977    Output Arguments:
978 . array - array of indices
979 
980    Level: advanced
981 
982 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
983 @*/
984 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
985 {
986 
987   PetscFunctionBegin;
988   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
989   PetscValidPointer(array,2);
990   *array = ltog->indices;
991   PetscFunctionReturn(0);
992 }
993 
994 #undef __FUNCT__
995 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
996 /*@C
997    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
998 
999    Not Collective
1000 
1001    Input Arguments:
1002 + ltog - local to global mapping
1003 - array - array of indices
1004 
1005    Level: advanced
1006 
1007 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1008 @*/
1009 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1010 {
1011 
1012   PetscFunctionBegin;
1013   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1014   PetscValidPointer(array,2);
1015   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1016   *array = PETSC_NULL;
1017   PetscFunctionReturn(0);
1018 }
1019