xref: /petsc/src/vec/is/utils/isltog.c (revision 6d0a4a0e3360c7060e83f77f4ff2ec32a1048728)
1 
2 #include <petscvec.h>   /*I "petscvec.h" I*/
3 #include <private/isimpl.h>    /*I "petscis.h"  I*/
4 
5 PetscClassId  IS_LTOGM_CLASSID;
6 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   mapping - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;   /* number of local indices stored in the mapping */
  PetscFunctionReturn(0);
}
34 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingView"
/*@C
    ISLocalToGlobalMappingView - View a local to global mapping

    Not Collective

    Input Parameters:
+   mapping - local to global mapping
-   viewer - viewer; if PETSC_NULL the default ASCII viewer on the mapping's communicator is used

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
{
  PetscInt        i;
  PetscMPIInt     rank;
  PetscBool       iascii;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!viewer) {
    /* fall back to stdout on the mapping's communicator */
    ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);

  ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* each rank prints its own "[rank] local global" pairs, one per line */
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<mapping->n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
  PetscFunctionReturn(0);
}
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
81 /*@
82     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
83     ordering and a global parallel ordering.
84 
85     Not collective
86 
87     Input Parameter:
88 .   is - index set containing the global numbers for each local number
89 
90     Output Parameter:
91 .   mapping - new mapping data structure
92 
93     Level: advanced
94 
95     Concepts: mapping^local to global
96 
97 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
98 @*/
99 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
100 {
101   PetscErrorCode ierr;
102   PetscInt       n;
103   const PetscInt *indices;
104   MPI_Comm       comm;
105 
106   PetscFunctionBegin;
107   PetscValidHeaderSpecific(is,IS_CLASSID,1);
108   PetscValidPointer(mapping,2);
109 
110   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
111   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
112   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
113   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
114   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
115   PetscFunctionReturn(0);
116 }
117 
118 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   cm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element
-   mode - see PetscCopyMode; PETSC_USE_POINTER is not currently supported

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  if (n) PetscValidIntPointer(indices,3);   /* indices may be NULL only for an empty mapping */
  PetscValidPointer(mapping,4);

  *mapping = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n       = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices; caller keeps ownership of its array */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) {
    /* the mapping takes ownership of the caller's array and frees it in Destroy */
    (*mapping)->indices = (PetscInt*)indices;
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
174 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingBlock"
/*@
    ISLocalToGlobalMappingBlock - Creates a blocked index version of an
       ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
       and VecSetLocalToGlobalMappingBlock().

    Not Collective, but communicator may have more than one process

    Input Parameters:
+    inmap - original point-wise mapping
-    bs - block size

    Output Parameter:
.   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
{
  PetscErrorCode ierr;
  PetscInt       *ii,i,n;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
  PetscValidPointer(outmap,3);
  if (bs > 1) {
    n    = inmap->n/bs;
    if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
    ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* NOTE(review): only the first pointwise index of each block is consulted;
         this assumes the bs indices of a block are contiguous and block-aligned
         (indices[bs*i] divisible by bs). No check is performed, so a malformed
         pointwise mapping yields a silently wrong block mapping — verify callers. */
      ii[i] = inmap->indices[bs*i]/bs;
    }
    ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
  } else {
    /* bs <= 1: blocked and pointwise mappings coincide; share the object by reference */
    ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
    *outmap = inmap;
  }
  PetscFunctionReturn(0);
}
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
222 /*@
223     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
224        ISLocalToGlobalMapping
225 
226     Not Collective, but communicator may have more than one process
227 
228     Input Parameter:
229 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
230 - bs - block size
231 
232     Output Parameter:
233 .   outmap - pointwise mapping
234 
235     Level: advanced
236 
237     Concepts: mapping^local to global
238 
239 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
240 @*/
241 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
242 {
243   PetscErrorCode ierr;
244   PetscInt       *ii,i,n;
245 
246   PetscFunctionBegin;
247   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
248   PetscValidPointer(outmap,2);
249   if (bs > 1) {
250     n    = inmap->n*bs;
251     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
252     for (i=0; i<n; i++) {
253       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
254     }
255     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
256   } else {
257     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
258     *outmap = inmap;
259   }
260   PetscFunctionReturn(0);
261 }
262 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure; set to PETSC_NULL on return

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);   /* destroying a NULL mapping is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* decrement the reference count; only the last reference actually frees the data */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;
  PetscFunctionReturn(0);
}
291 
292 #undef __FUNCT__
293 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
294 /*@
295     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
296     a new index set using the global numbering defined in an ISLocalToGlobalMapping
297     context.
298 
299     Not collective
300 
301     Input Parameters:
302 +   mapping - mapping between local and global numbering
303 -   is - index set in local numbering
304 
305     Output Parameters:
306 .   newis - index set in global numbering
307 
308     Level: advanced
309 
310     Concepts: mapping^local to global
311 
312 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
313           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
314 @*/
315 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
316 {
317   PetscErrorCode ierr;
318   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
319   const PetscInt *idxin;
320 
321   PetscFunctionBegin;
322   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
323   PetscValidHeaderSpecific(is,IS_CLASSID,2);
324   PetscValidPointer(newis,3);
325 
326   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
327   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
328   idxmap = mapping->indices;
329 
330   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
331   for (i=0; i<n; i++) {
332     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
333     idxout[i] = idxmap[idxin[i]];
334   }
335   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
336   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
337   PetscFunctionReturn(0);
338 }
339 
340 /*MC
341    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
342    and converts them to the global numbering.
343 
344    Synopsis:
   PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
346 
347    Not collective
348 
349    Input Parameters:
350 +  mapping - the local to global mapping context
351 .  N - number of integers
352 -  in - input indices in local numbering
353 
354    Output Parameter:
355 .  out - indices in global numbering
356 
357    Notes:
358    The in and out array parameters may be identical.
359 
360    Level: advanced
361 
362 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
363           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
364           AOPetscToApplication(), ISGlobalToLocalMappingApply()
365 
366     Concepts: mapping^local to global
367 
368 M*/
369 
370 /* -----------------------------------------------------------------------------------------*/
371 
372 #undef __FUNCT__
373 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
374 /*
375     Creates the global fields in the ISLocalToGlobalMapping structure
376 */
377 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
378 {
379   PetscErrorCode ierr;
380   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
381 
382   PetscFunctionBegin;
383   end   = 0;
384   start = 100000000;
385 
386   for (i=0; i<n; i++) {
387     if (idx[i] < 0) continue;
388     if (idx[i] < start) start = idx[i];
389     if (idx[i] > end)   end   = idx[i];
390   }
391   if (start > end) {start = 0; end = -1;}
392   mapping->globalstart = start;
393   mapping->globalend   = end;
394 
395   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
396   mapping->globals = globals;
397   for (i=0; i<end-start+1; i++) {
398     globals[i] = -1;
399   }
400   for (i=0; i<n; i++) {
401     if (idx[i] < 0) continue;
402     globals[idx[i] - start] = i;
403   }
404 
405   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
406   PetscFunctionReturn(0);
407 }
408 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingApply"
/*@
    ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
    specified with a global numbering.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
.   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
           IS_GTOLM_DROP - drops the indices with no local value from the output list
.   n - number of global indices to map
-   idx - global indices to map

    Output Parameters:
+   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
-   idxout - local index of each global index, one must pass in an array long enough
             to hold all the indices. You can call ISGlobalToLocalMappingApply() with
             idxout == PETSC_NULL to determine the required length (returned in nout)
             and then allocate the required space and call ISGlobalToLocalMappingApply()
             a second time to set the values.

    Notes:
    Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.

    This is not scalable in memory usage. Each processor requires O(Nglobal) size
    array to compute these.

    Level: advanced

    Concepts: mapping^global to local

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy()
@*/
PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
                                  PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
{
  PetscInt       i,*globals,nf = 0,tmp,start,end;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  if (!mapping->globals) {
    /* lazily build the dense global->local lookup table on first use */
    ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
  }
  globals = mapping->globals;
  start   = mapping->globalstart;
  end     = mapping->globalend;

  if (type == IS_GTOLM_MASK) {
    /* one output per input: negatives pass through unchanged,
       indices outside [start,end] become -1 */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) idxout[i] = idx[i];
        else if (idx[i] < start) idxout[i] = -1;
        else if (idx[i] > end)   idxout[i] = -1;
        else                     idxout[i] = globals[idx[i] - start];
      }
    }
    if (nout) *nout = n;
  } else {
    /* IS_GTOLM_DROP: compact the output, skipping indices with no local value */
    if (idxout) {
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;           /* in range but not locally present */
        idxout[nf++] = tmp;
      }
    } else {
      /* counting-only pass (idxout == PETSC_NULL): report required length via nout */
      for (i=0; i<n; i++) {
        if (idx[i] < 0) continue;
        if (idx[i] < start) continue;
        if (idx[i] > end) continue;
        tmp = globals[idx[i] - start];
        if (tmp < 0) continue;
        nf++;
      }
    }
    if (nout) *nout = nf;
  }

  PetscFunctionReturn(0);
}
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
498 /*@C
499     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
500      each index shared by more than one processor
501 
502     Collective on ISLocalToGlobalMapping
503 
504     Input Parameters:
505 .   mapping - the mapping from local to global indexing
506 
507     Output Parameter:
508 +   nproc - number of processors that are connected to this one
509 .   proc - neighboring processors
510 .   numproc - number of indices for each subdomain (processor)
511 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
512 
513     Level: advanced
514 
515     Concepts: mapping^local to global
516 
517     Fortran Usage:
518 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
519 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
520           PetscInt indices[nproc][numprocmax],ierr)
521         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
522         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
523 
524 
525 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
526           ISLocalToGlobalMappingRestoreInfo()
527 @*/
528 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
529 {
530   PetscErrorCode ierr;
531   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
532   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
533   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
534   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
535   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
536   PetscInt       first_procs,first_numprocs,*first_indices;
537   MPI_Request    *recv_waits,*send_waits;
538   MPI_Status     recv_status,*send_status,*recv_statuses;
539   MPI_Comm       comm = ((PetscObject)mapping)->comm;
540   PetscBool      debug = PETSC_FALSE;
541 
542   PetscFunctionBegin;
543   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
544   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
545   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
546   if (size == 1) {
547     *nproc         = 0;
548     *procs         = PETSC_NULL;
549     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
550     (*numprocs)[0] = 0;
551     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
552     (*indices)[0]  = PETSC_NULL;
553     PetscFunctionReturn(0);
554   }
555 
556   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
557 
558   /*
559     Notes on ISLocalToGlobalMappingGetInfo
560 
561     globally owned node - the nodes that have been assigned to this processor in global
562            numbering, just for this routine.
563 
564     nontrivial globally owned node - node assigned to this processor that is on a subdomain
565            boundary (i.e. is has more than one local owner)
566 
567     locally owned node - node that exists on this processors subdomain
568 
569     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
570            local subdomain
571   */
572   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
573   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
574   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
575 
576   for (i=0; i<n; i++) {
577     if (lindices[i] > max) max = lindices[i];
578   }
579   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
580   Ng++;
581   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
582   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
583   scale  = Ng/size + 1;
584   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
585   rstart = scale*rank;
586 
587   /* determine ownership ranges of global indices */
588   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
589   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
590 
591   /* determine owners of each local node  */
592   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
593   for (i=0; i<n; i++) {
594     proc             = lindices[i]/scale; /* processor that globally owns this index */
595     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
596     owner[i]         = proc;
597     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
598   }
599   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
600   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
601 
602   /* inform other processors of number of messages and max length*/
603   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
604   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
605 
606   /* post receives for owned rows */
607   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
608   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
609   for (i=0; i<nrecvs; i++) {
610     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
611   }
612 
613   /* pack messages containing lists of local nodes to owners */
614   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
615   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
616   starts[0]  = 0;
617   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
618   for (i=0; i<n; i++) {
619     sends[starts[owner[i]]++] = lindices[i];
620     sends[starts[owner[i]]++] = i;
621   }
622   ierr = PetscFree(owner);CHKERRQ(ierr);
623   starts[0]  = 0;
624   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
625 
626   /* send the messages */
627   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
628   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
629   cnt = 0;
630   for (i=0; i<size; i++) {
631     if (nprocs[2*i]) {
632       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
633       dest[cnt] = i;
634       cnt++;
635     }
636   }
637   ierr = PetscFree(starts);CHKERRQ(ierr);
638 
639   /* wait on receives */
640   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
641   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
642   cnt  = nrecvs;
643   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
644   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
645   while (cnt) {
646     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
647     /* unpack receives into our local space */
648     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
649     source[imdex]  = recv_status.MPI_SOURCE;
650     len[imdex]     = len[imdex]/2;
651     /* count how many local owners for each of my global owned indices */
652     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
653     cnt--;
654   }
655   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
656 
657   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
658   nowned  = 0;
659   nownedm = 0;
660   for (i=0; i<ng; i++) {
661     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
662   }
663 
664   /* create single array to contain rank of all local owners of each globally owned index */
665   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
666   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
667   starts[0] = 0;
668   for (i=1; i<ng; i++) {
669     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
670     else starts[i] = starts[i-1];
671   }
672 
673   /* for each nontrival globally owned node list all arriving processors */
674   for (i=0; i<nrecvs; i++) {
675     for (j=0; j<len[i]; j++) {
676       node = recvs[2*i*nmax+2*j]-rstart;
677       if (nownedsenders[node] > 1) {
678         ownedsenders[starts[node]++] = source[i];
679       }
680     }
681   }
682 
683   if (debug) { /* -----------------------------------  */
684     starts[0]    = 0;
685     for (i=1; i<ng; i++) {
686       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
687       else starts[i] = starts[i-1];
688     }
689     for (i=0; i<ng; i++) {
690       if (nownedsenders[i] > 1) {
691         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
692         for (j=0; j<nownedsenders[i]; j++) {
693           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
694         }
695         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
696       }
697     }
698     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
699   }/* -----------------------------------  */
700 
701   /* wait on original sends */
702   if (nsends) {
703     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
704     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
705     ierr = PetscFree(send_status);CHKERRQ(ierr);
706   }
707   ierr = PetscFree(send_waits);CHKERRQ(ierr);
708   ierr = PetscFree(sends);CHKERRQ(ierr);
709   ierr = PetscFree(nprocs);CHKERRQ(ierr);
710 
711   /* pack messages to send back to local owners */
712   starts[0]    = 0;
713   for (i=1; i<ng; i++) {
714     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
715     else starts[i] = starts[i-1];
716   }
717   nsends2 = nrecvs;
718   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
719   for (i=0; i<nrecvs; i++) {
720     nprocs[i] = 1;
721     for (j=0; j<len[i]; j++) {
722       node = recvs[2*i*nmax+2*j]-rstart;
723       if (nownedsenders[node] > 1) {
724         nprocs[i] += 2 + nownedsenders[node];
725       }
726     }
727   }
728   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
729   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
730   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
731   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
732   /*
733      Each message is 1 + nprocs[i] long, and consists of
734        (0) the number of nodes being sent back
735        (1) the local node number,
736        (2) the number of processors sharing it,
737        (3) the processors sharing it
738   */
739   for (i=0; i<nsends2; i++) {
740     cnt = 1;
741     sends2[starts2[i]] = 0;
742     for (j=0; j<len[i]; j++) {
743       node = recvs[2*i*nmax+2*j]-rstart;
744       if (nownedsenders[node] > 1) {
745         sends2[starts2[i]]++;
746         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
747         sends2[starts2[i]+cnt++] = nownedsenders[node];
748         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
749         cnt += nownedsenders[node];
750       }
751     }
752   }
753 
754   /* receive the message lengths */
755   nrecvs2 = nsends;
756   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
757   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
758   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
759   for (i=0; i<nrecvs2; i++) {
760     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
761   }
762 
763   /* send the message lengths */
764   for (i=0; i<nsends2; i++) {
765     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
766   }
767 
768   /* wait on receives of lens */
769   if (nrecvs2) {
770     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
771     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
772     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
773   }
774   ierr = PetscFree(recv_waits);
775 
776   starts3[0] = 0;
777   nt         = 0;
778   for (i=0; i<nrecvs2-1; i++) {
779     starts3[i+1] = starts3[i] + lens2[i];
780     nt          += lens2[i];
781   }
782   nt += lens2[nrecvs2-1];
783 
784   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
785   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
786   for (i=0; i<nrecvs2; i++) {
787     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
788   }
789 
790   /* send the messages */
791   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
792   for (i=0; i<nsends2; i++) {
793     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
794   }
795 
796   /* wait on receives */
797   if (nrecvs2) {
798     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
799     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
800     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
801   }
802   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
803   ierr = PetscFree(nprocs);CHKERRQ(ierr);
804 
805   if (debug) { /* -----------------------------------  */
806     cnt = 0;
807     for (i=0; i<nrecvs2; i++) {
808       nt = recvs2[cnt++];
809       for (j=0; j<nt; j++) {
810         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
811         for (k=0; k<recvs2[cnt+1]; k++) {
812           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
813         }
814         cnt += 2 + recvs2[cnt+1];
815         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
816       }
817     }
818     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
819   } /* -----------------------------------  */
820 
821   /* count number subdomains for each local node */
822   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
823   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
824   cnt  = 0;
825   for (i=0; i<nrecvs2; i++) {
826     nt = recvs2[cnt++];
827     for (j=0; j<nt; j++) {
828       for (k=0; k<recvs2[cnt+1]; k++) {
829         nprocs[recvs2[cnt+2+k]]++;
830       }
831       cnt += 2 + recvs2[cnt+1];
832     }
833   }
834   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
835   *nproc    = nt;
836   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
837   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
838   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
839   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
840   cnt       = 0;
841   for (i=0; i<size; i++) {
842     if (nprocs[i] > 0) {
843       bprocs[i]        = cnt;
844       (*procs)[cnt]    = i;
845       (*numprocs)[cnt] = nprocs[i];
846       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
847       cnt++;
848     }
849   }
850 
851   /* make the list of subdomains for each nontrivial local node */
852   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
853   cnt  = 0;
854   for (i=0; i<nrecvs2; i++) {
855     nt = recvs2[cnt++];
856     for (j=0; j<nt; j++) {
857       for (k=0; k<recvs2[cnt+1]; k++) {
858         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
859       }
860       cnt += 2 + recvs2[cnt+1];
861     }
862   }
863   ierr = PetscFree(bprocs);CHKERRQ(ierr);
864   ierr = PetscFree(recvs2);CHKERRQ(ierr);
865 
866   /* sort the node indexing by their global numbers */
867   nt = *nproc;
868   for (i=0; i<nt; i++) {
869     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
870     for (j=0; j<(*numprocs)[i]; j++) {
871       tmp[j] = lindices[(*indices)[i][j]];
872     }
873     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
874     ierr = PetscFree(tmp);CHKERRQ(ierr);
875   }
876 
877   if (debug) { /* -----------------------------------  */
878     nt = *nproc;
879     for (i=0; i<nt; i++) {
880       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
881       for (j=0; j<(*numprocs)[i]; j++) {
882         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
883       }
884       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
885     }
886     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
887   } /* -----------------------------------  */
888 
889   /* wait on sends */
890   if (nsends2) {
891     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
892     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
893     ierr = PetscFree(send_status);CHKERRQ(ierr);
894   }
895 
896   ierr = PetscFree(starts3);CHKERRQ(ierr);
897   ierr = PetscFree(dest);CHKERRQ(ierr);
898   ierr = PetscFree(send_waits);CHKERRQ(ierr);
899 
900   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
901   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
902   ierr = PetscFree(starts);CHKERRQ(ierr);
903   ierr = PetscFree(starts2);CHKERRQ(ierr);
904   ierr = PetscFree(lens2);CHKERRQ(ierr);
905 
906   ierr = PetscFree(source);CHKERRQ(ierr);
907   ierr = PetscFree(len);CHKERRQ(ierr);
908   ierr = PetscFree(recvs);CHKERRQ(ierr);
909   ierr = PetscFree(nprocs);CHKERRQ(ierr);
910   ierr = PetscFree(sends2);CHKERRQ(ierr);
911 
912   /* put the information about myself as the first entry in the list */
913   first_procs    = (*procs)[0];
914   first_numprocs = (*numprocs)[0];
915   first_indices  = (*indices)[0];
916   for (i=0; i<*nproc; i++) {
917     if ((*procs)[i] == rank) {
918       (*procs)[0]    = (*procs)[i];
919       (*numprocs)[0] = (*numprocs)[i];
920       (*indices)[0]  = (*indices)[i];
921       (*procs)[i]    = first_procs;
922       (*numprocs)[i] = first_numprocs;
923       (*indices)[i]  = first_indices;
924       break;
925     }
926   }
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
932 /*@C
933     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
934 
935     Collective on ISLocalToGlobalMapping
936 
937     Input Parameters:
938 .   mapping - the mapping from local to global indexing
939 
940     Output Parameters:
941 +   nproc - number of processors that are connected to this one
942 .   procs - neighboring processors
943 .   numprocs - number of indices for each processor
944 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
945 
946     Level: advanced
947 
948 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
949           ISLocalToGlobalMappingGetInfo()
950 @*/
951 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
952 {
953   PetscErrorCode ierr;
954   PetscInt       i;
955 
956   PetscFunctionBegin;
957   ierr = PetscFree(*procs);CHKERRQ(ierr);
958   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
959   if (*indices) {
960     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
961     for (i=1; i<*nproc; i++) {
962       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
963     }
964     ierr = PetscFree(*indices);CHKERRQ(ierr);
965   }
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
971 /*@C
972    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
973 
974    Not Collective
975 
976    Input Arguments:
977 . ltog - local to global mapping
978 
979    Output Arguments:
980 . array - array of indices
981 
982    Level: advanced
983 
984 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
985 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* borrow a direct pointer to the mapping's internal index array; the caller
     must not free it and must return it with ISLocalToGlobalMappingRestoreIndices() */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
995 
996 #undef __FUNCT__
997 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
998 /*@C
999    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1000 
1001    Not Collective
1002 
1003    Input Arguments:
1004 + ltog - local to global mapping
1005 - array - array of indices
1006 
1007    Level: advanced
1008 
1009 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1010 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* the pointer must be exactly the one borrowed from this mapping by
     ISLocalToGlobalMappingGetIndices(); anything else indicates caller error */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* null the caller's pointer so a stale use after restore is caught early */
  *array = PETSC_NULL;
  PetscFunctionReturn(0);
}
1021 
1022 #undef __FUNCT__
1023 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1024 /*@C
1025    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1026 
1027    Not Collective
1028 
1029    Input Arguments:
1030 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1031 . n - number of mappings to concatenate
1032 - ltogs - local to global mappings
1033 
1034    Output Arguments:
1035 . ltogcat - new mapping
1036 
1037    Level: advanced
1038 
1039 .seealso: ISLocalToGlobalMappingCreate()
1040 @*/
1041 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1042 {
1043   PetscInt       i,cnt,m,*idx;
1044   PetscErrorCode ierr;
1045 
1046   PetscFunctionBegin;
1047   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1048   if (n > 0) PetscValidPointer(ltogs,3);
1049   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1050   PetscValidPointer(ltogcat,4);
1051   for (cnt=0,i=0; i<n; i++) {
1052     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1053     cnt += m;
1054   }
1055   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1056   for (cnt=0,i=0; i<n; i++) {
1057     const PetscInt *subidx;
1058     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1059     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1060     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1061     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1062     cnt += m;
1063   }
1064   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1065   PetscFunctionReturn(0);
1066 }
1067