xref: /petsc/src/vec/is/utils/isltog.c (revision 73260a9bf5f079fa3cde3d9bd07e3ae1f3545c60)
1 
2 #include <petscvec.h>   /*I "petscvec.h" I*/
3 #include <private/isimpl.h>    /*I "petscis.h"  I*/
4 
5 PetscClassId  IS_LTOGM_CLASSID;
6 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;  /* count of local indices stored when the mapping was created */
  PetscFunctionReturn(0);
}
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "ISLocalToGlobalMappingView"
37 /*@C
38     ISLocalToGlobalMappingView - View a local to global mapping
39 
40     Not Collective
41 
42     Input Parameters:
43 +   ltog - local to global mapping
44 -   viewer - viewer
45 
46     Level: advanced
47 
48     Concepts: mapping^local to global
49 
50 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
51 @*/
52 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
53 {
54   PetscInt        i;
55   PetscMPIInt     rank;
56   PetscBool       iascii;
57   PetscErrorCode  ierr;
58 
59   PetscFunctionBegin;
60   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
61   if (!viewer) {
62     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
63   }
64   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
65 
66   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
67   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
68   if (iascii) {
69     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
70     for (i=0; i<mapping->n; i++) {
71       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
72     }
73     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
74     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
75   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
76   PetscFunctionReturn(0);
77 }
78 
79 #undef __FUNCT__
80 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
81 /*@
82     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
83     ordering and a global parallel ordering.
84 
85     Not collective
86 
87     Input Parameter:
88 .   is - index set containing the global numbers for each local number
89 
90     Output Parameter:
91 .   mapping - new mapping data structure
92 
93     Level: advanced
94 
95     Concepts: mapping^local to global
96 
97 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
98 @*/
99 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
100 {
101   PetscErrorCode ierr;
102   PetscInt       n;
103   const PetscInt *indices;
104   MPI_Comm       comm;
105 
106   PetscFunctionBegin;
107   PetscValidHeaderSpecific(is,IS_CLASSID,1);
108   PetscValidPointer(mapping,2);
109 
110   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
111   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
112   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
113   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
114   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
115   PetscFunctionReturn(0);
116 }
117 
118 
119 #undef __FUNCT__
120 #define __FUNCT__ "ISLocalToGlobalMappingCreate"
121 /*@
122     ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
123     ordering and a global parallel ordering.
124 
125     Not Collective, but communicator may have more than one process
126 
127     Input Parameters:
128 +   comm - MPI communicator
129 .   n - the number of local elements
130 .   indices - the global index for each local element
131 -   mode - see PetscCopyMode
132 
133     Output Parameter:
134 .   mapping - new mapping data structure
135 
136     Level: advanced
137 
138     Concepts: mapping^local to global
139 
140 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
141 @*/
142 PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
143 {
144   PetscErrorCode ierr;
145   PetscInt       *in;
146 
147   PetscFunctionBegin;
148   if (n) PetscValidIntPointer(indices,3);
149   PetscValidPointer(mapping,4);
150 
151   *mapping = PETSC_NULL;
152 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
153   ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
154 #endif
155 
156   ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping",
157 			   cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
158   (*mapping)->n       = n;
159   /*
160     Do not create the global to local mapping. This is only created if
161     ISGlobalToLocalMapping() is called
162   */
163   (*mapping)->globals = 0;
164   if (mode == PETSC_COPY_VALUES) {
165     ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
166     ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
167     ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
168     (*mapping)->indices = in;
169   } else if (mode == PETSC_OWN_POINTER) {
170     (*mapping)->indices = (PetscInt*)indices;
171   } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
172   PetscFunctionReturn(0);
173 }
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
177 /*@
178     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
179        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
180        and VecSetLocalToGlobalMappingBlock().
181 
182     Not Collective, but communicator may have more than one process
183 
184     Input Parameters:
185 +    inmap - original point-wise mapping
186 -    bs - block size
187 
188     Output Parameter:
189 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
190 
191     Level: advanced
192 
193     Concepts: mapping^local to global
194 
195 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
196 @*/
197 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
198 {
199   PetscErrorCode ierr;
200   PetscInt       *ii,i,n;
201 
202   PetscFunctionBegin;
203   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
204   PetscValidPointer(outmap,3);
205   if (bs > 1) {
206     n    = inmap->n/bs;
207     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
208     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
209     for (i=0; i<n; i++) {
210       ii[i] = inmap->indices[bs*i]/bs;
211     }
212     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
213   } else {
214     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
215     *outmap = inmap;
216   }
217   PetscFunctionReturn(0);
218 }
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
222 /*@
223     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
224        ISLocalToGlobalMapping
225 
226     Not Collective, but communicator may have more than one process
227 
228     Input Parameter:
229 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
230 - bs - block size
231 
232     Output Parameter:
233 .   outmap - pointwise mapping
234 
235     Level: advanced
236 
237     Concepts: mapping^local to global
238 
239 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
240 @*/
241 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
242 {
243   PetscErrorCode ierr;
244   PetscInt       *ii,i,n;
245 
246   PetscFunctionBegin;
247   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
248   PetscValidPointer(outmap,2);
249   if (bs > 1) {
250     n    = inmap->n*bs;
251     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
252     for (i=0; i<n; i++) {
253       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
254     }
255     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
256   } else {
257     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
258     *outmap = inmap;
259   }
260   PetscFunctionReturn(0);
261 }
262 
263 #undef __FUNCT__
264 #define __FUNCT__ "ISLocalToGlobalMappingDestroy"
265 /*@
266    ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
267    ordering and a global parallel ordering.
268 
269    Note Collective
270 
271    Input Parameters:
272 .  mapping - mapping data structure
273 
274    Level: advanced
275 
276 .seealso: ISLocalToGlobalMappingCreate()
277 @*/
278 PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
279 {
280   PetscErrorCode ierr;
281   PetscFunctionBegin;
282   if (!*mapping) PetscFunctionReturn(0);
283   PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
284   if (!--((PetscObject)(*mapping))->refct > 0) {
285     ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
286     ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
287     ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
288   }
289   *mapping = 0;
290   PetscFunctionReturn(0);
291 }
292 
293 #undef __FUNCT__
294 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
295 /*@
296     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
297     a new index set using the global numbering defined in an ISLocalToGlobalMapping
298     context.
299 
300     Not collective
301 
302     Input Parameters:
303 +   mapping - mapping between local and global numbering
304 -   is - index set in local numbering
305 
306     Output Parameters:
307 .   newis - index set in global numbering
308 
309     Level: advanced
310 
311     Concepts: mapping^local to global
312 
313 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
314           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
315 @*/
316 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
317 {
318   PetscErrorCode ierr;
319   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
320   const PetscInt *idxin;
321 
322   PetscFunctionBegin;
323   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
324   PetscValidHeaderSpecific(is,IS_CLASSID,2);
325   PetscValidPointer(newis,3);
326 
327   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
328   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
329   idxmap = mapping->indices;
330 
331   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
332   for (i=0; i<n; i++) {
333     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
334     idxout[i] = idxmap[idxin[i]];
335   }
336   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
337   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
338   PetscFunctionReturn(0);
339 }
340 
341 /*MC
342    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
343    and converts them to the global numbering.
344 
345    Synopsis:
   PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,PetscInt N,const PetscInt in[],PetscInt out[])
347 
348    Not collective
349 
350    Input Parameters:
351 +  mapping - the local to global mapping context
352 .  N - number of integers
353 -  in - input indices in local numbering
354 
355    Output Parameter:
356 .  out - indices in global numbering
357 
358    Notes:
359    The in and out array parameters may be identical.
360 
361    Level: advanced
362 
363 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
364           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
365           AOPetscToApplication(), ISGlobalToLocalMappingApply()
366 
367     Concepts: mapping^local to global
368 
369 M*/
370 
371 /* -----------------------------------------------------------------------------------------*/
372 
373 #undef __FUNCT__
374 #define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
375 /*
376     Creates the global fields in the ISLocalToGlobalMapping structure
377 */
378 static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
379 {
380   PetscErrorCode ierr;
381   PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;
382 
383   PetscFunctionBegin;
384   end   = 0;
385   start = 100000000;
386 
387   for (i=0; i<n; i++) {
388     if (idx[i] < 0) continue;
389     if (idx[i] < start) start = idx[i];
390     if (idx[i] > end)   end   = idx[i];
391   }
392   if (start > end) {start = 0; end = -1;}
393   mapping->globalstart = start;
394   mapping->globalend   = end;
395 
396   ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
397   mapping->globals = globals;
398   for (i=0; i<end-start+1; i++) {
399     globals[i] = -1;
400   }
401   for (i=0; i<n; i++) {
402     if (idx[i] < 0) continue;
403     globals[idx[i] - start] = i;
404   }
405 
406   ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
407   PetscFunctionReturn(0);
408 }
409 
410 #undef __FUNCT__
411 #define __FUNCT__ "ISGlobalToLocalMappingApply"
412 /*@
413     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
414     specified with a global numbering.
415 
416     Not collective
417 
418     Input Parameters:
419 +   mapping - mapping between local and global numbering
420 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
421            IS_GTOLM_DROP - drops the indices with no local value from the output list
422 .   n - number of global indices to map
423 -   idx - global indices to map
424 
425     Output Parameters:
426 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
427 -   idxout - local index of each global index, one must pass in an array long enough
428              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
429              idxout == PETSC_NULL to determine the required length (returned in nout)
430              and then allocate the required space and call ISGlobalToLocalMappingApply()
431              a second time to set the values.
432 
433     Notes:
434     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
435 
436     This is not scalable in memory usage. Each processor requires O(Nglobal) size
437     array to compute these.
438 
439     Level: advanced
440 
441     Concepts: mapping^global to local
442 
443 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
444           ISLocalToGlobalMappingDestroy()
445 @*/
446 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
447                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
448 {
449   PetscInt       i,*globals,nf = 0,tmp,start,end;
450   PetscErrorCode ierr;
451 
452   PetscFunctionBegin;
453   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
454   if (!mapping->globals) {
455     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
456   }
457   globals = mapping->globals;
458   start   = mapping->globalstart;
459   end     = mapping->globalend;
460 
461   if (type == IS_GTOLM_MASK) {
462     if (idxout) {
463       for (i=0; i<n; i++) {
464         if (idx[i] < 0) idxout[i] = idx[i];
465         else if (idx[i] < start) idxout[i] = -1;
466         else if (idx[i] > end)   idxout[i] = -1;
467         else                     idxout[i] = globals[idx[i] - start];
468       }
469     }
470     if (nout) *nout = n;
471   } else {
472     if (idxout) {
473       for (i=0; i<n; i++) {
474         if (idx[i] < 0) continue;
475         if (idx[i] < start) continue;
476         if (idx[i] > end) continue;
477         tmp = globals[idx[i] - start];
478         if (tmp < 0) continue;
479         idxout[nf++] = tmp;
480       }
481     } else {
482       for (i=0; i<n; i++) {
483         if (idx[i] < 0) continue;
484         if (idx[i] < start) continue;
485         if (idx[i] > end) continue;
486         tmp = globals[idx[i] - start];
487         if (tmp < 0) continue;
488         nf++;
489       }
490     }
491     if (nout) *nout = nf;
492   }
493 
494   PetscFunctionReturn(0);
495 }
496 
497 #undef __FUNCT__
498 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
499 /*@C
500     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
501      each index shared by more than one processor
502 
503     Collective on ISLocalToGlobalMapping
504 
505     Input Parameters:
506 .   mapping - the mapping from local to global indexing
507 
508     Output Parameter:
509 +   nproc - number of processors that are connected to this one
510 .   proc - neighboring processors
511 .   numproc - number of indices for each subdomain (processor)
512 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
513 
514     Level: advanced
515 
516     Concepts: mapping^local to global
517 
518     Fortran Usage:
519 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
520 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
521           PetscInt indices[nproc][numprocmax],ierr)
522         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
523         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
524 
525 
526 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
527           ISLocalToGlobalMappingRestoreInfo()
528 @*/
529 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
530 {
531   PetscErrorCode ierr;
532   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
533   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
534   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
535   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
536   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
537   PetscInt       first_procs,first_numprocs,*first_indices;
538   MPI_Request    *recv_waits,*send_waits;
539   MPI_Status     recv_status,*send_status,*recv_statuses;
540   MPI_Comm       comm = ((PetscObject)mapping)->comm;
541   PetscBool      debug = PETSC_FALSE;
542 
543   PetscFunctionBegin;
544   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
545   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
546   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
547   if (size == 1) {
548     *nproc         = 0;
549     *procs         = PETSC_NULL;
550     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
551     (*numprocs)[0] = 0;
552     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
553     (*indices)[0]  = PETSC_NULL;
554     PetscFunctionReturn(0);
555   }
556 
557   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
558 
559   /*
560     Notes on ISLocalToGlobalMappingGetInfo
561 
562     globally owned node - the nodes that have been assigned to this processor in global
563            numbering, just for this routine.
564 
565     nontrivial globally owned node - node assigned to this processor that is on a subdomain
566            boundary (i.e. is has more than one local owner)
567 
568     locally owned node - node that exists on this processors subdomain
569 
570     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
571            local subdomain
572   */
573   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
574   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
575   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
576 
577   for (i=0; i<n; i++) {
578     if (lindices[i] > max) max = lindices[i];
579   }
580   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
581   Ng++;
582   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
583   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
584   scale  = Ng/size + 1;
585   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
586   rstart = scale*rank;
587 
588   /* determine ownership ranges of global indices */
589   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
590   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
591 
592   /* determine owners of each local node  */
593   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
594   for (i=0; i<n; i++) {
595     proc             = lindices[i]/scale; /* processor that globally owns this index */
596     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
597     owner[i]         = proc;
598     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
599   }
600   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
601   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
602 
603   /* inform other processors of number of messages and max length*/
604   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
605   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
606 
607   /* post receives for owned rows */
608   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
609   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
610   for (i=0; i<nrecvs; i++) {
611     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
612   }
613 
614   /* pack messages containing lists of local nodes to owners */
615   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
616   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
617   starts[0]  = 0;
618   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
619   for (i=0; i<n; i++) {
620     sends[starts[owner[i]]++] = lindices[i];
621     sends[starts[owner[i]]++] = i;
622   }
623   ierr = PetscFree(owner);CHKERRQ(ierr);
624   starts[0]  = 0;
625   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
626 
627   /* send the messages */
628   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
629   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
630   cnt = 0;
631   for (i=0; i<size; i++) {
632     if (nprocs[2*i]) {
633       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
634       dest[cnt] = i;
635       cnt++;
636     }
637   }
638   ierr = PetscFree(starts);CHKERRQ(ierr);
639 
640   /* wait on receives */
641   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
642   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
643   cnt  = nrecvs;
644   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
645   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
646   while (cnt) {
647     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
648     /* unpack receives into our local space */
649     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
650     source[imdex]  = recv_status.MPI_SOURCE;
651     len[imdex]     = len[imdex]/2;
652     /* count how many local owners for each of my global owned indices */
653     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
654     cnt--;
655   }
656   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
657 
658   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
659   nowned  = 0;
660   nownedm = 0;
661   for (i=0; i<ng; i++) {
662     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
663   }
664 
665   /* create single array to contain rank of all local owners of each globally owned index */
666   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
667   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
668   starts[0] = 0;
669   for (i=1; i<ng; i++) {
670     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
671     else starts[i] = starts[i-1];
672   }
673 
674   /* for each nontrival globally owned node list all arriving processors */
675   for (i=0; i<nrecvs; i++) {
676     for (j=0; j<len[i]; j++) {
677       node = recvs[2*i*nmax+2*j]-rstart;
678       if (nownedsenders[node] > 1) {
679         ownedsenders[starts[node]++] = source[i];
680       }
681     }
682   }
683 
684   if (debug) { /* -----------------------------------  */
685     starts[0]    = 0;
686     for (i=1; i<ng; i++) {
687       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
688       else starts[i] = starts[i-1];
689     }
690     for (i=0; i<ng; i++) {
691       if (nownedsenders[i] > 1) {
692         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
693         for (j=0; j<nownedsenders[i]; j++) {
694           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
695         }
696         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
697       }
698     }
699     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
700   }/* -----------------------------------  */
701 
702   /* wait on original sends */
703   if (nsends) {
704     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
705     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
706     ierr = PetscFree(send_status);CHKERRQ(ierr);
707   }
708   ierr = PetscFree(send_waits);CHKERRQ(ierr);
709   ierr = PetscFree(sends);CHKERRQ(ierr);
710   ierr = PetscFree(nprocs);CHKERRQ(ierr);
711 
712   /* pack messages to send back to local owners */
713   starts[0]    = 0;
714   for (i=1; i<ng; i++) {
715     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
716     else starts[i] = starts[i-1];
717   }
718   nsends2 = nrecvs;
719   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
720   for (i=0; i<nrecvs; i++) {
721     nprocs[i] = 1;
722     for (j=0; j<len[i]; j++) {
723       node = recvs[2*i*nmax+2*j]-rstart;
724       if (nownedsenders[node] > 1) {
725         nprocs[i] += 2 + nownedsenders[node];
726       }
727     }
728   }
729   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
730   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
731   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
732   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
733   /*
734      Each message is 1 + nprocs[i] long, and consists of
735        (0) the number of nodes being sent back
736        (1) the local node number,
737        (2) the number of processors sharing it,
738        (3) the processors sharing it
739   */
740   for (i=0; i<nsends2; i++) {
741     cnt = 1;
742     sends2[starts2[i]] = 0;
743     for (j=0; j<len[i]; j++) {
744       node = recvs[2*i*nmax+2*j]-rstart;
745       if (nownedsenders[node] > 1) {
746         sends2[starts2[i]]++;
747         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
748         sends2[starts2[i]+cnt++] = nownedsenders[node];
749         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
750         cnt += nownedsenders[node];
751       }
752     }
753   }
754 
755   /* receive the message lengths */
756   nrecvs2 = nsends;
757   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
758   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
759   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
760   for (i=0; i<nrecvs2; i++) {
761     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
762   }
763 
764   /* send the message lengths */
765   for (i=0; i<nsends2; i++) {
766     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
767   }
768 
769   /* wait on receives of lens */
770   if (nrecvs2) {
771     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
772     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
773     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
774   }
775   ierr = PetscFree(recv_waits);
776 
777   starts3[0] = 0;
778   nt         = 0;
779   for (i=0; i<nrecvs2-1; i++) {
780     starts3[i+1] = starts3[i] + lens2[i];
781     nt          += lens2[i];
782   }
783   nt += lens2[nrecvs2-1];
784 
785   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
786   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
787   for (i=0; i<nrecvs2; i++) {
788     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
789   }
790 
791   /* send the messages */
792   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
793   for (i=0; i<nsends2; i++) {
794     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
795   }
796 
797   /* wait on receives */
798   if (nrecvs2) {
799     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
800     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
801     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
802   }
803   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
804   ierr = PetscFree(nprocs);CHKERRQ(ierr);
805 
806   if (debug) { /* -----------------------------------  */
807     cnt = 0;
808     for (i=0; i<nrecvs2; i++) {
809       nt = recvs2[cnt++];
810       for (j=0; j<nt; j++) {
811         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
812         for (k=0; k<recvs2[cnt+1]; k++) {
813           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
814         }
815         cnt += 2 + recvs2[cnt+1];
816         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
817       }
818     }
819     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
820   } /* -----------------------------------  */
821 
822   /* count number subdomains for each local node */
823   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
824   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
825   cnt  = 0;
826   for (i=0; i<nrecvs2; i++) {
827     nt = recvs2[cnt++];
828     for (j=0; j<nt; j++) {
829       for (k=0; k<recvs2[cnt+1]; k++) {
830         nprocs[recvs2[cnt+2+k]]++;
831       }
832       cnt += 2 + recvs2[cnt+1];
833     }
834   }
835   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
836   *nproc    = nt;
837   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
838   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
839   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
840   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
841   cnt       = 0;
842   for (i=0; i<size; i++) {
843     if (nprocs[i] > 0) {
844       bprocs[i]        = cnt;
845       (*procs)[cnt]    = i;
846       (*numprocs)[cnt] = nprocs[i];
847       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
848       cnt++;
849     }
850   }
851 
852   /* make the list of subdomains for each nontrivial local node */
853   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
854   cnt  = 0;
855   for (i=0; i<nrecvs2; i++) {
856     nt = recvs2[cnt++];
857     for (j=0; j<nt; j++) {
858       for (k=0; k<recvs2[cnt+1]; k++) {
859         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
860       }
861       cnt += 2 + recvs2[cnt+1];
862     }
863   }
864   ierr = PetscFree(bprocs);CHKERRQ(ierr);
865   ierr = PetscFree(recvs2);CHKERRQ(ierr);
866 
867   /* sort the node indexing by their global numbers */
868   nt = *nproc;
869   for (i=0; i<nt; i++) {
870     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
871     for (j=0; j<(*numprocs)[i]; j++) {
872       tmp[j] = lindices[(*indices)[i][j]];
873     }
874     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
875     ierr = PetscFree(tmp);CHKERRQ(ierr);
876   }
877 
878   if (debug) { /* -----------------------------------  */
879     nt = *nproc;
880     for (i=0; i<nt; i++) {
881       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
882       for (j=0; j<(*numprocs)[i]; j++) {
883         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
884       }
885       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
886     }
887     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
888   } /* -----------------------------------  */
889 
890   /* wait on sends */
891   if (nsends2) {
892     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
893     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
894     ierr = PetscFree(send_status);CHKERRQ(ierr);
895   }
896 
897   ierr = PetscFree(starts3);CHKERRQ(ierr);
898   ierr = PetscFree(dest);CHKERRQ(ierr);
899   ierr = PetscFree(send_waits);CHKERRQ(ierr);
900 
901   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
902   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
903   ierr = PetscFree(starts);CHKERRQ(ierr);
904   ierr = PetscFree(starts2);CHKERRQ(ierr);
905   ierr = PetscFree(lens2);CHKERRQ(ierr);
906 
907   ierr = PetscFree(source);CHKERRQ(ierr);
908   ierr = PetscFree(len);CHKERRQ(ierr);
909   ierr = PetscFree(recvs);CHKERRQ(ierr);
910   ierr = PetscFree(nprocs);CHKERRQ(ierr);
911   ierr = PetscFree(sends2);CHKERRQ(ierr);
912 
913   /* put the information about myself as the first entry in the list */
914   first_procs    = (*procs)[0];
915   first_numprocs = (*numprocs)[0];
916   first_indices  = (*indices)[0];
917   for (i=0; i<*nproc; i++) {
918     if ((*procs)[i] == rank) {
919       (*procs)[0]    = (*procs)[i];
920       (*numprocs)[0] = (*numprocs)[i];
921       (*indices)[0]  = (*indices)[i];
922       (*procs)[i]    = first_procs;
923       (*numprocs)[i] = first_numprocs;
924       (*indices)[i]  = first_indices;
925       break;
926     }
927   }
928   PetscFunctionReturn(0);
929 }
930 
931 #undef __FUNCT__
932 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
933 /*@C
934     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
935 
936     Collective on ISLocalToGlobalMapping
937 
938     Input Parameters:
939 .   mapping - the mapping from local to global indexing
940 
941     Output Parameter:
942 +   nproc - number of processors that are connected to this one
943 .   proc - neighboring processors
944 .   numproc - number of indices for each processor
945 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
946 
947     Level: advanced
948 
949 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
950           ISLocalToGlobalMappingGetInfo()
951 @*/
952 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
953 {
954   PetscErrorCode ierr;
955   PetscInt       i;
956 
957   PetscFunctionBegin;
958   ierr = PetscFree(*procs);CHKERRQ(ierr);
959   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
960   if (*indices) {
961     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
962     for (i=1; i<*nproc; i++) {
963       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
964     }
965     ierr = PetscFree(*indices);CHKERRQ(ierr);
966   }
967   PetscFunctionReturn(0);
968 }
969 
970 #undef __FUNCT__
971 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
972 /*@C
973    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
974 
975    Not Collective
976 
977    Input Arguments:
978 . ltog - local to global mapping
979 
980    Output Arguments:
981 . array - array of indices
982 
983    Level: advanced
984 
985 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
986 @*/
987 PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
988 {
989 
990   PetscFunctionBegin;
991   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
992   PetscValidPointer(array,2);
993   *array = ltog->indices;
994   PetscFunctionReturn(0);
995 }
996 
997 #undef __FUNCT__
998 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
999 /*@C
1000    ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingRestoreIndices()
1001 
1002    Not Collective
1003 
1004    Input Arguments:
1005 + ltog - local to global mapping
1006 - array - array of indices
1007 
1008    Level: advanced
1009 
1010 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1011 @*/
1012 PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
1013 {
1014 
1015   PetscFunctionBegin;
1016   PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
1017   PetscValidPointer(array,2);
1018   if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
1019   *array = PETSC_NULL;
1020   PetscFunctionReturn(0);
1021 }
1022 
1023 #undef __FUNCT__
1024 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1025 /*@C
1026    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1027 
1028    Not Collective
1029 
1030    Input Arguments:
1031 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1032 . n - number of mappings to concatenate
1033 - ltogs - local to global mappings
1034 
1035    Output Arguments:
1036 . ltogcat - new mapping
1037 
1038    Level: advanced
1039 
1040 .seealso: ISLocalToGlobalMappingCreate()
1041 @*/
1042 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1043 {
1044   PetscInt       i,cnt,m,*idx;
1045   PetscErrorCode ierr;
1046 
1047   PetscFunctionBegin;
1048   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1049   if (n > 0) PetscValidPointer(ltogs,3);
1050   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1051   PetscValidPointer(ltogcat,4);
1052   for (cnt=0,i=0; i<n; i++) {
1053     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1054     cnt += m;
1055   }
1056   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1057   for (cnt=0,i=0; i<n; i++) {
1058     const PetscInt *subidx;
1059     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1060     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1061     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1062     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1063     cnt += m;
1064   }
1065   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1066   PetscFunctionReturn(0);
1067 }
1068