xref: /petsc/src/vec/is/utils/isltog.c (revision 5fd668637986a8d8518383a9159eebc368e1d5b4)
1 
2 #include <petsc-private/isimpl.h>    /*I "petscis.h"  I*/
3 
PetscClassId  IS_LTOGM_CLASSID;  /* PETSc class id for ISLocalToGlobalMapping objects (used by PetscHeaderCreate/PetscValidHeaderSpecific below) */
5 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingGetSize"
/*@C
    ISLocalToGlobalMappingGetSize - Gets the local size of a local to global mapping.

    Not Collective

    Input Parameter:
.   ltog - local to global mapping

    Output Parameter:
.   n - the number of entries in the local mapping

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingGetSize(ISLocalToGlobalMapping mapping,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
  PetscValidIntPointer(n,2);
  *n = mapping->n;   /* number of local indices stored in the mapping */
  PetscFunctionReturn(0);
}
33 
34 #undef __FUNCT__
35 #define __FUNCT__ "ISLocalToGlobalMappingView"
36 /*@C
37     ISLocalToGlobalMappingView - View a local to global mapping
38 
39     Not Collective
40 
41     Input Parameters:
42 +   ltog - local to global mapping
43 -   viewer - viewer
44 
45     Level: advanced
46 
47     Concepts: mapping^local to global
48 
49 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
50 @*/
51 PetscErrorCode  ISLocalToGlobalMappingView(ISLocalToGlobalMapping mapping,PetscViewer viewer)
52 {
53   PetscInt        i;
54   PetscMPIInt     rank;
55   PetscBool       iascii;
56   PetscErrorCode  ierr;
57 
58   PetscFunctionBegin;
59   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
60   if (!viewer) {
61     ierr = PetscViewerASCIIGetStdout(((PetscObject)mapping)->comm,&viewer);CHKERRQ(ierr);
62   }
63   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
64 
65   ierr = MPI_Comm_rank(((PetscObject)mapping)->comm,&rank);CHKERRQ(ierr);
66   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
67   if (iascii) {
68     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
69     for (i=0; i<mapping->n; i++) {
70       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d %d\n",rank,i,mapping->indices[i]);CHKERRQ(ierr);
71     }
72     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
73     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
74   } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported for ISLocalToGlobalMapping",((PetscObject)viewer)->type_name);
75   PetscFunctionReturn(0);
76 }
77 
78 #undef __FUNCT__
79 #define __FUNCT__ "ISLocalToGlobalMappingCreateIS"
80 /*@
81     ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
82     ordering and a global parallel ordering.
83 
84     Not collective
85 
86     Input Parameter:
87 .   is - index set containing the global numbers for each local number
88 
89     Output Parameter:
90 .   mapping - new mapping data structure
91 
92     Level: advanced
93 
94     Concepts: mapping^local to global
95 
96 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
97 @*/
98 PetscErrorCode  ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
99 {
100   PetscErrorCode ierr;
101   PetscInt       n;
102   const PetscInt *indices;
103   MPI_Comm       comm;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(is,IS_CLASSID,1);
107   PetscValidPointer(mapping,2);
108 
109   ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
110   ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
111   ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
112   ierr = ISLocalToGlobalMappingCreate(comm,n,indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
113   ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreateSF"
/*@C
    ISLocalToGlobalMappingCreateSF - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Collective

    Input Parameters:
+   sf - star forest mapping contiguous local indices to (rank, offset)
-   start - first global index on this process

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode ISLocalToGlobalMappingCreateSF(PetscSF sf,PetscInt start,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,maxlocal,nroots,nleaves,*globals,*ltog;
  const PetscInt *ilocal;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(mapping,3);

  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,PETSC_NULL);CHKERRQ(ierr);
  /* size of the local space: largest leaf index + 1, or nleaves when leaves are contiguous (ilocal == NULL) */
  if (ilocal) {for (i=0,maxlocal=0; i<nleaves; i++) maxlocal = PetscMax(maxlocal,ilocal[i]+1);}
  else maxlocal = nleaves;
  ierr = PetscMalloc(nroots*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = PetscMalloc(maxlocal*sizeof(PetscInt),&ltog);CHKERRQ(ierr);
  /* roots own the contiguous global range [start, start+nroots) on this process */
  for (i=0; i<nroots; i++) globals[i] = start + i;
  /* local slots not reached by any leaf keep -1 after the broadcast */
  for (i=0; i<maxlocal; i++) ltog[i] = -1;
  /* pull each root's global index down to its leaves */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,globals,ltog);CHKERRQ(ierr);
  /* ownership of ltog transfers to the new mapping (PETSC_OWN_POINTER); only globals is freed here */
  ierr = ISLocalToGlobalMappingCreate(comm,maxlocal,ltog,PETSC_OWN_POINTER,mapping);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
163 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingCreate"
/*@
    ISLocalToGlobalMappingCreate - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not Collective, but communicator may have more than one process

    Input Parameters:
+   comm - MPI communicator
.   n - the number of local elements
.   indices - the global index for each local element, these do not need to be in increasing order (sorted)
-   mode - see PetscCopyMode

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS()
@*/
PetscErrorCode  ISLocalToGlobalMappingCreate(MPI_Comm cm,PetscInt n,const PetscInt indices[],PetscCopyMode mode,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       *in;

  PetscFunctionBegin;
  /* indices may be NULL only when there are no local entries */
  if (n) PetscValidIntPointer(indices,3);
  PetscValidPointer(mapping,4);

  *mapping = PETSC_NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = ISInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(*mapping,_p_ISLocalToGlobalMapping,int,IS_LTOGM_CLASSID,0,"ISLocalToGlobalMapping","Local to global mapping","IS",
                           cm,ISLocalToGlobalMappingDestroy,ISLocalToGlobalMappingView);CHKERRQ(ierr);
  (*mapping)->n       = n;
  /*
    Do not create the global to local mapping. This is only created if
    ISGlobalToLocalMapping() is called
  */
  (*mapping)->globals = 0;
  if (mode == PETSC_COPY_VALUES) {
    /* take a private copy of the caller's indices */
    ierr = PetscMalloc(n*sizeof(PetscInt),&in);CHKERRQ(ierr);
    ierr = PetscMemcpy(in,indices,n*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(*mapping,n*sizeof(PetscInt));CHKERRQ(ierr);
    (*mapping)->indices = in;
  } else if (mode == PETSC_OWN_POINTER) {
    /* the mapping takes ownership of the caller's array and will free it on destroy */
    (*mapping)->indices = (PetscInt*)indices;
  } else SETERRQ(cm,PETSC_ERR_SUP,"Cannot currently use PETSC_USE_POINTER");
  PetscFunctionReturn(0);
}
219 
220 #undef __FUNCT__
221 #define __FUNCT__ "ISLocalToGlobalMappingBlock"
222 /*@
223     ISLocalToGlobalMappingBlock - Creates a blocked index version of an
224        ISLocalToGlobalMapping that is appropriate for MatSetLocalToGlobalMappingBlock()
225        and VecSetLocalToGlobalMappingBlock().
226 
227     Not Collective, but communicator may have more than one process
228 
229     Input Parameters:
230 +    inmap - original point-wise mapping
231 -    bs - block size
232 
233     Output Parameter:
234 .   outmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
235 
236     Level: advanced
237 
238     Concepts: mapping^local to global
239 
240 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingCreateIS()
241 @*/
242 PetscErrorCode  ISLocalToGlobalMappingBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
243 {
244   PetscErrorCode ierr;
245   PetscInt       *ii,i,n;
246 
247   PetscFunctionBegin;
248   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
249   PetscValidPointer(outmap,3);
250   if (bs > 1) {
251     n    = inmap->n/bs;
252     if (n*bs != inmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Pointwise mapping length is not divisible by block size");
253     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
254     for (i=0; i<n; i++) {
255       ii[i] = inmap->indices[bs*i]/bs;
256     }
257     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
258   } else {
259     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
260     *outmap = inmap;
261   }
262   PetscFunctionReturn(0);
263 }
264 
265 #undef __FUNCT__
266 #define __FUNCT__ "ISLocalToGlobalMappingUnBlock"
267 /*@
268     ISLocalToGlobalMappingUnBlock - Creates a scalar index version of a blocked
269        ISLocalToGlobalMapping
270 
271     Not Collective, but communicator may have more than one process
272 
273     Input Parameter:
274 + inmap - block based mapping; the indices are relative to BLOCKS, not individual vector or matrix entries.
275 - bs - block size
276 
277     Output Parameter:
278 .   outmap - pointwise mapping
279 
280     Level: advanced
281 
282     Concepts: mapping^local to global
283 
284 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingBlock()
285 @*/
286 PetscErrorCode  ISLocalToGlobalMappingUnBlock(ISLocalToGlobalMapping inmap,PetscInt bs,ISLocalToGlobalMapping *outmap)
287 {
288   PetscErrorCode ierr;
289   PetscInt       *ii,i,n;
290 
291   PetscFunctionBegin;
292   PetscValidHeaderSpecific(inmap,IS_LTOGM_CLASSID,1);
293   PetscValidPointer(outmap,2);
294   if (bs > 1) {
295     n    = inmap->n*bs;
296     ierr = PetscMalloc(n*sizeof(PetscInt),&ii);CHKERRQ(ierr);
297     for (i=0; i<n; i++) {
298       ii[i] = inmap->indices[i/bs]*bs + (i%bs);
299     }
300     ierr = ISLocalToGlobalMappingCreate(((PetscObject)inmap)->comm,n,ii,PETSC_OWN_POINTER,outmap);CHKERRQ(ierr);
301   } else {
302     ierr    = PetscObjectReference((PetscObject)inmap);CHKERRQ(ierr);
303     *outmap = inmap;
304   }
305   PetscFunctionReturn(0);
306 }
307 
#undef __FUNCT__
#define __FUNCT__ "ISLocalToGlobalMappingDestroy"
/*@
   ISLocalToGlobalMappingDestroy - Destroys a mapping between a local (0 to n)
   ordering and a global parallel ordering.

   Not Collective

   Input Parameters:
.  mapping - mapping data structure

   Level: advanced

.seealso: ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode  ISLocalToGlobalMappingDestroy(ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*mapping) PetscFunctionReturn(0);   /* destroying a NULL mapping is a no-op */
  PetscValidHeaderSpecific((*mapping),IS_LTOGM_CLASSID,1);
  /* only tear down the object once the last reference is dropped */
  if (--((PetscObject)(*mapping))->refct > 0) {*mapping = 0;PetscFunctionReturn(0);}
  ierr = PetscFree((*mapping)->indices);CHKERRQ(ierr);
  ierr = PetscFree((*mapping)->globals);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(mapping);CHKERRQ(ierr);
  *mapping = 0;   /* caller's pointer is nulled to prevent reuse */
  PetscFunctionReturn(0);
}
337 
338 #undef __FUNCT__
339 #define __FUNCT__ "ISLocalToGlobalMappingApplyIS"
340 /*@
341     ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
342     a new index set using the global numbering defined in an ISLocalToGlobalMapping
343     context.
344 
345     Not collective
346 
347     Input Parameters:
348 +   mapping - mapping between local and global numbering
349 -   is - index set in local numbering
350 
351     Output Parameters:
352 .   newis - index set in global numbering
353 
354     Level: advanced
355 
356     Concepts: mapping^local to global
357 
358 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
359           ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
360 @*/
361 PetscErrorCode  ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
362 {
363   PetscErrorCode ierr;
364   PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
365   const PetscInt *idxin;
366 
367   PetscFunctionBegin;
368   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
369   PetscValidHeaderSpecific(is,IS_CLASSID,2);
370   PetscValidPointer(newis,3);
371 
372   ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
373   ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
374   idxmap = mapping->indices;
375 
376   ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
377   for (i=0; i<n; i++) {
378     if (idxin[i] >= Nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
379     idxout[i] = idxmap[idxin[i]];
380   }
381   ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
382   ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idxout,PETSC_OWN_POINTER,newis);CHKERRQ(ierr);
383   PetscFunctionReturn(0);
384 }
385 
386 /*MC
387    ISLocalToGlobalMappingApply - Takes a list of integers in a local numbering
388    and converts them to the global numbering.
389 
390    Synopsis:
391    #include "petscis.h"
392    PetscErrorCode ISLocalToGlobalMappingApply(ISLocalToGlobalMapping mapping,int N,int in[],int out[])
393 
394    Not collective
395 
396    Input Parameters:
397 +  mapping - the local to global mapping context
398 .  N - number of integers
399 -  in - input indices in local numbering
400 
401    Output Parameter:
402 .  out - indices in global numbering
403 
404    Notes:
405    The in and out array parameters may be identical.
406 
407    Level: advanced
408 
409 .seealso: ISLocalToGlobalMappingCreate(),ISLocalToGlobalMappingDestroy(),
410           ISLocalToGlobalMappingApplyIS(),AOCreateBasic(),AOApplicationToPetsc(),
411           AOPetscToApplication(), ISGlobalToLocalMappingApply()
412 
413     Concepts: mapping^local to global
414 
415 M*/
416 
417 /* -----------------------------------------------------------------------------------------*/
418 
#undef __FUNCT__
#define __FUNCT__ "ISGlobalToLocalMappingSetUp_Private"
/*
    Creates the global fields in the ISLocalToGlobalMapping structure:
    a dense table globals[] covering [globalstart,globalend], where
    globals[g-globalstart] is the local index mapped to global index g,
    or -1 if no local index maps there. Negative entries of
    mapping->indices are ignored. Memory use is O(globalend-globalstart),
    i.e. proportional to the span of global indices, not to n.
*/
static PetscErrorCode ISGlobalToLocalMappingSetUp_Private(ISLocalToGlobalMapping mapping)
{
  PetscErrorCode ierr;
  PetscInt       i,*idx = mapping->indices,n = mapping->n,end,start,*globals;

  PetscFunctionBegin;
  end   = 0;
  start = PETSC_MAX_INT;

  /* find the range [start,end] spanned by the nonnegative global indices */
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    if (idx[i] < start) start = idx[i];
    if (idx[i] > end)   end   = idx[i];
  }
  if (start > end) {start = 0; end = -1;}   /* no nonnegative indices: record an empty range */
  mapping->globalstart = start;
  mapping->globalend   = end;

  /* end-start+2 (not +1) keeps the allocation nonzero even for an empty range */
  ierr             = PetscMalloc((end-start+2)*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  mapping->globals = globals;
  for (i=0; i<end-start+1; i++) {
    globals[i] = -1;   /* -1 marks global indices with no local counterpart */
  }
  for (i=0; i<n; i++) {
    if (idx[i] < 0) continue;
    globals[idx[i] - start] = i;   /* if a global index appears twice, the later local index wins */
  }

  ierr = PetscLogObjectMemory(mapping,(end-start+1)*sizeof(PetscInt));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
455 
456 #undef __FUNCT__
457 #define __FUNCT__ "ISGlobalToLocalMappingApply"
458 /*@
459     ISGlobalToLocalMappingApply - Provides the local numbering for a list of integers
460     specified with a global numbering.
461 
462     Not collective
463 
464     Input Parameters:
465 +   mapping - mapping between local and global numbering
466 .   type - IS_GTOLM_MASK - replaces global indices with no local value with -1
467            IS_GTOLM_DROP - drops the indices with no local value from the output list
468 .   n - number of global indices to map
469 -   idx - global indices to map
470 
471     Output Parameters:
472 +   nout - number of indices in output array (if type == IS_GTOLM_MASK then nout = n)
473 -   idxout - local index of each global index, one must pass in an array long enough
474              to hold all the indices. You can call ISGlobalToLocalMappingApply() with
475              idxout == PETSC_NULL to determine the required length (returned in nout)
476              and then allocate the required space and call ISGlobalToLocalMappingApply()
477              a second time to set the values.
478 
479     Notes:
480     Either nout or idxout may be PETSC_NULL. idx and idxout may be identical.
481 
482     This is not scalable in memory usage. Each processor requires O(Nglobal) size
483     array to compute these.
484 
485     Level: advanced
486 
487     Developer Note: The manual page states that idx and idxout may be identical but the calling
488        sequence declares idx as const so it cannot be the same as idxout.
489 
490     Concepts: mapping^global to local
491 
492 .seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
493           ISLocalToGlobalMappingDestroy()
494 @*/
495 PetscErrorCode  ISGlobalToLocalMappingApply(ISLocalToGlobalMapping mapping,ISGlobalToLocalMappingType type,
496                                   PetscInt n,const PetscInt idx[],PetscInt *nout,PetscInt idxout[])
497 {
498   PetscInt       i,*globals,nf = 0,tmp,start,end;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
503   if (!mapping->globals) {
504     ierr = ISGlobalToLocalMappingSetUp_Private(mapping);CHKERRQ(ierr);
505   }
506   globals = mapping->globals;
507   start   = mapping->globalstart;
508   end     = mapping->globalend;
509 
510   if (type == IS_GTOLM_MASK) {
511     if (idxout) {
512       for (i=0; i<n; i++) {
513         if (idx[i] < 0) idxout[i] = idx[i];
514         else if (idx[i] < start) idxout[i] = -1;
515         else if (idx[i] > end)   idxout[i] = -1;
516         else                     idxout[i] = globals[idx[i] - start];
517       }
518     }
519     if (nout) *nout = n;
520   } else {
521     if (idxout) {
522       for (i=0; i<n; i++) {
523         if (idx[i] < 0) continue;
524         if (idx[i] < start) continue;
525         if (idx[i] > end) continue;
526         tmp = globals[idx[i] - start];
527         if (tmp < 0) continue;
528         idxout[nf++] = tmp;
529       }
530     } else {
531       for (i=0; i<n; i++) {
532         if (idx[i] < 0) continue;
533         if (idx[i] < start) continue;
534         if (idx[i] > end) continue;
535         tmp = globals[idx[i] - start];
536         if (tmp < 0) continue;
537         nf++;
538       }
539     }
540     if (nout) *nout = nf;
541   }
542 
543   PetscFunctionReturn(0);
544 }
545 
546 #undef __FUNCT__
547 #define __FUNCT__ "ISLocalToGlobalMappingGetInfo"
548 /*@C
549     ISLocalToGlobalMappingGetInfo - Gets the neighbor information for each processor and
550      each index shared by more than one processor
551 
552     Collective on ISLocalToGlobalMapping
553 
554     Input Parameters:
555 .   mapping - the mapping from local to global indexing
556 
557     Output Parameter:
558 +   nproc - number of processors that are connected to this one
559 .   proc - neighboring processors
560 .   numproc - number of indices for each subdomain (processor)
561 -   indices - indices of nodes (in local numbering) shared with neighbors (sorted by global numbering)
562 
563     Level: advanced
564 
565     Concepts: mapping^local to global
566 
567     Fortran Usage:
568 $        ISLocalToGlobalMpngGetInfoSize(ISLocalToGlobalMapping,PetscInt nproc,PetscInt numprocmax,ierr) followed by
569 $        ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping,PetscInt nproc, PetscInt procs[nproc],PetscInt numprocs[nproc],
570           PetscInt indices[nproc][numprocmax],ierr)
571         There is no ISLocalToGlobalMappingRestoreInfo() in Fortran. You must make sure that procs[], numprocs[] and
572         indices[][] are large enough arrays, either by allocating them dynamically or defining static ones large enough.
573 
574 
575 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
576           ISLocalToGlobalMappingRestoreInfo()
577 @*/
578 PetscErrorCode  ISLocalToGlobalMappingGetInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
579 {
580   PetscErrorCode ierr;
581   PetscMPIInt    size,rank,tag1,tag2,tag3,*len,*source,imdex;
582   PetscInt       i,n = mapping->n,Ng,ng,max = 0,*lindices = mapping->indices;
583   PetscInt       *nprocs,*owner,nsends,*sends,j,*starts,nmax,nrecvs,*recvs,proc;
584   PetscInt       cnt,scale,*ownedsenders,*nownedsenders,rstart,nowned;
585   PetscInt       node,nownedm,nt,*sends2,nsends2,*starts2,*lens2,*dest,nrecvs2,*starts3,*recvs2,k,*bprocs,*tmp;
586   PetscInt       first_procs,first_numprocs,*first_indices;
587   MPI_Request    *recv_waits,*send_waits;
588   MPI_Status     recv_status,*send_status,*recv_statuses;
589   MPI_Comm       comm = ((PetscObject)mapping)->comm;
590   PetscBool      debug = PETSC_FALSE;
591 
592   PetscFunctionBegin;
593   PetscValidHeaderSpecific(mapping,IS_LTOGM_CLASSID,1);
594   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
595   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
596   if (size == 1) {
597     *nproc         = 0;
598     *procs         = PETSC_NULL;
599     ierr           = PetscMalloc(sizeof(PetscInt),numprocs);CHKERRQ(ierr);
600     (*numprocs)[0] = 0;
601     ierr           = PetscMalloc(sizeof(PetscInt*),indices);CHKERRQ(ierr);
602     (*indices)[0]  = PETSC_NULL;
603     PetscFunctionReturn(0);
604   }
605 
606   ierr = PetscOptionsGetBool(PETSC_NULL,"-islocaltoglobalmappinggetinfo_debug",&debug,PETSC_NULL);CHKERRQ(ierr);
607 
608   /*
609     Notes on ISLocalToGlobalMappingGetInfo
610 
611     globally owned node - the nodes that have been assigned to this processor in global
612            numbering, just for this routine.
613 
614     nontrivial globally owned node - node assigned to this processor that is on a subdomain
615            boundary (i.e. is has more than one local owner)
616 
617     locally owned node - node that exists on this processors subdomain
618 
619     nontrivial locally owned node - node that is not in the interior (i.e. has more than one
620            local subdomain
621   */
622   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag1);CHKERRQ(ierr);
623   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag2);CHKERRQ(ierr);
624   ierr = PetscObjectGetNewTag((PetscObject)mapping,&tag3);CHKERRQ(ierr);
625 
626   for (i=0; i<n; i++) {
627     if (lindices[i] > max) max = lindices[i];
628   }
629   ierr   = MPI_Allreduce(&max,&Ng,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
630   Ng++;
631   ierr   = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
632   ierr   = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
633   scale  = Ng/size + 1;
634   ng     = scale; if (rank == size-1) ng = Ng - scale*(size-1); ng = PetscMax(1,ng);
635   rstart = scale*rank;
636 
637   /* determine ownership ranges of global indices */
638   ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
639   ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
640 
641   /* determine owners of each local node  */
642   ierr = PetscMalloc(n*sizeof(PetscInt),&owner);CHKERRQ(ierr);
643   for (i=0; i<n; i++) {
644     proc             = lindices[i]/scale; /* processor that globally owns this index */
645     nprocs[2*proc+1] = 1;                 /* processor globally owns at least one of ours */
646     owner[i]         = proc;
647     nprocs[2*proc]++;                     /* count of how many that processor globally owns of ours */
648   }
649   nsends = 0; for (i=0; i<size; i++) nsends += nprocs[2*i+1];
650   ierr = PetscInfo1(mapping,"Number of global owners for my local data %d\n",nsends);CHKERRQ(ierr);
651 
652   /* inform other processors of number of messages and max length*/
653   ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);
654   ierr = PetscInfo1(mapping,"Number of local owners for my global data %d\n",nrecvs);CHKERRQ(ierr);
655 
656   /* post receives for owned rows */
657   ierr = PetscMalloc((2*nrecvs+1)*(nmax+1)*sizeof(PetscInt),&recvs);CHKERRQ(ierr);
658   ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
659   for (i=0; i<nrecvs; i++) {
660     ierr = MPI_Irecv(recvs+2*nmax*i,2*nmax,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,recv_waits+i);CHKERRQ(ierr);
661   }
662 
663   /* pack messages containing lists of local nodes to owners */
664   ierr       = PetscMalloc((2*n+1)*sizeof(PetscInt),&sends);CHKERRQ(ierr);
665   ierr       = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
666   starts[0]  = 0;
667   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
668   for (i=0; i<n; i++) {
669     sends[starts[owner[i]]++] = lindices[i];
670     sends[starts[owner[i]]++] = i;
671   }
672   ierr = PetscFree(owner);CHKERRQ(ierr);
673   starts[0]  = 0;
674   for (i=1; i<size; i++) { starts[i] = starts[i-1] + 2*nprocs[2*i-2];}
675 
676   /* send the messages */
677   ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
678   ierr = PetscMalloc((nsends+1)*sizeof(PetscInt),&dest);CHKERRQ(ierr);
679   cnt = 0;
680   for (i=0; i<size; i++) {
681     if (nprocs[2*i]) {
682       ierr      = MPI_Isend(sends+starts[i],2*nprocs[2*i],MPIU_INT,i,tag1,comm,send_waits+cnt);CHKERRQ(ierr);
683       dest[cnt] = i;
684       cnt++;
685     }
686   }
687   ierr = PetscFree(starts);CHKERRQ(ierr);
688 
689   /* wait on receives */
690   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&source);CHKERRQ(ierr);
691   ierr = PetscMalloc((nrecvs+1)*sizeof(PetscMPIInt),&len);CHKERRQ(ierr);
692   cnt  = nrecvs;
693   ierr = PetscMalloc((ng+1)*sizeof(PetscInt),&nownedsenders);CHKERRQ(ierr);
694   ierr = PetscMemzero(nownedsenders,ng*sizeof(PetscInt));CHKERRQ(ierr);
695   while (cnt) {
696     ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
697     /* unpack receives into our local space */
698     ierr           = MPI_Get_count(&recv_status,MPIU_INT,&len[imdex]);CHKERRQ(ierr);
699     source[imdex]  = recv_status.MPI_SOURCE;
700     len[imdex]     = len[imdex]/2;
701     /* count how many local owners for each of my global owned indices */
702     for (i=0; i<len[imdex]; i++) nownedsenders[recvs[2*imdex*nmax+2*i]-rstart]++;
703     cnt--;
704   }
705   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
706 
707   /* count how many globally owned indices are on an edge multiplied by how many processors own them. */
708   nowned  = 0;
709   nownedm = 0;
710   for (i=0; i<ng; i++) {
711     if (nownedsenders[i] > 1) {nownedm += nownedsenders[i]; nowned++;}
712   }
713 
714   /* create single array to contain rank of all local owners of each globally owned index */
715   ierr      = PetscMalloc((nownedm+1)*sizeof(PetscInt),&ownedsenders);CHKERRQ(ierr);
716   ierr      = PetscMalloc((ng+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
717   starts[0] = 0;
718   for (i=1; i<ng; i++) {
719     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
720     else starts[i] = starts[i-1];
721   }
722 
723   /* for each nontrival globally owned node list all arriving processors */
724   for (i=0; i<nrecvs; i++) {
725     for (j=0; j<len[i]; j++) {
726       node = recvs[2*i*nmax+2*j]-rstart;
727       if (nownedsenders[node] > 1) {
728         ownedsenders[starts[node]++] = source[i];
729       }
730     }
731   }
732 
733   if (debug) { /* -----------------------------------  */
734     starts[0]    = 0;
735     for (i=1; i<ng; i++) {
736       if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
737       else starts[i] = starts[i-1];
738     }
739     for (i=0; i<ng; i++) {
740       if (nownedsenders[i] > 1) {
741         ierr = PetscSynchronizedPrintf(comm,"[%d] global node %d local owner processors: ",rank,i+rstart);CHKERRQ(ierr);
742         for (j=0; j<nownedsenders[i]; j++) {
743           ierr = PetscSynchronizedPrintf(comm,"%d ",ownedsenders[starts[i]+j]);CHKERRQ(ierr);
744         }
745         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
746       }
747     }
748     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
749   }/* -----------------------------------  */
750 
751   /* wait on original sends */
752   if (nsends) {
753     ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
754     ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
755     ierr = PetscFree(send_status);CHKERRQ(ierr);
756   }
757   ierr = PetscFree(send_waits);CHKERRQ(ierr);
758   ierr = PetscFree(sends);CHKERRQ(ierr);
759   ierr = PetscFree(nprocs);CHKERRQ(ierr);
760 
761   /* pack messages to send back to local owners */
762   starts[0]    = 0;
763   for (i=1; i<ng; i++) {
764     if (nownedsenders[i-1] > 1) starts[i] = starts[i-1] + nownedsenders[i-1];
765     else starts[i] = starts[i-1];
766   }
767   nsends2 = nrecvs;
768   ierr    = PetscMalloc((nsends2+1)*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); /* length of each message */
769   for (i=0; i<nrecvs; i++) {
770     nprocs[i] = 1;
771     for (j=0; j<len[i]; j++) {
772       node = recvs[2*i*nmax+2*j]-rstart;
773       if (nownedsenders[node] > 1) {
774         nprocs[i] += 2 + nownedsenders[node];
775       }
776     }
777   }
778   nt = 0; for (i=0; i<nsends2; i++) nt += nprocs[i];
779   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&sends2);CHKERRQ(ierr);
780   ierr = PetscMalloc((nsends2+1)*sizeof(PetscInt),&starts2);CHKERRQ(ierr);
781   starts2[0] = 0; for (i=1; i<nsends2; i++) starts2[i] = starts2[i-1] + nprocs[i-1];
782   /*
783      Each message is 1 + nprocs[i] long, and consists of
784        (0) the number of nodes being sent back
785        (1) the local node number,
786        (2) the number of processors sharing it,
787        (3) the processors sharing it
788   */
789   for (i=0; i<nsends2; i++) {
790     cnt = 1;
791     sends2[starts2[i]] = 0;
792     for (j=0; j<len[i]; j++) {
793       node = recvs[2*i*nmax+2*j]-rstart;
794       if (nownedsenders[node] > 1) {
795         sends2[starts2[i]]++;
796         sends2[starts2[i]+cnt++] = recvs[2*i*nmax+2*j+1];
797         sends2[starts2[i]+cnt++] = nownedsenders[node];
798         ierr = PetscMemcpy(&sends2[starts2[i]+cnt],&ownedsenders[starts[node]],nownedsenders[node]*sizeof(PetscInt));CHKERRQ(ierr);
799         cnt += nownedsenders[node];
800       }
801     }
802   }
803 
804   /* receive the message lengths */
805   nrecvs2 = nsends;
806   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&lens2);CHKERRQ(ierr);
807   ierr = PetscMalloc((nrecvs2+1)*sizeof(PetscInt),&starts3);CHKERRQ(ierr);
808   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
809   for (i=0; i<nrecvs2; i++) {
810     ierr = MPI_Irecv(&lens2[i],1,MPIU_INT,dest[i],tag2,comm,recv_waits+i);CHKERRQ(ierr);
811   }
812 
813   /* send the message lengths */
814   for (i=0; i<nsends2; i++) {
815     ierr = MPI_Send(&nprocs[i],1,MPIU_INT,source[i],tag2,comm);CHKERRQ(ierr);
816   }
817 
818   /* wait on receives of lens */
819   if (nrecvs2) {
820     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
821     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
822     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
823   }
824   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
825 
826   starts3[0] = 0;
827   nt         = 0;
828   for (i=0; i<nrecvs2-1; i++) {
829     starts3[i+1] = starts3[i] + lens2[i];
830     nt          += lens2[i];
831   }
832   if (nrecvs2) nt += lens2[nrecvs2-1];
833 
834   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),&recvs2);CHKERRQ(ierr);
835   ierr = PetscMalloc((nrecvs2+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
836   for (i=0; i<nrecvs2; i++) {
837     ierr = MPI_Irecv(recvs2+starts3[i],lens2[i],MPIU_INT,dest[i],tag3,comm,recv_waits+i);CHKERRQ(ierr);
838   }
839 
840   /* send the messages */
841   ierr = PetscMalloc((nsends2+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
842   for (i=0; i<nsends2; i++) {
843     ierr = MPI_Isend(sends2+starts2[i],nprocs[i],MPIU_INT,source[i],tag3,comm,send_waits+i);CHKERRQ(ierr);
844   }
845 
846   /* wait on receives */
847   if (nrecvs2) {
848     ierr = PetscMalloc(nrecvs2*sizeof(MPI_Status),&recv_statuses);CHKERRQ(ierr);
849     ierr = MPI_Waitall(nrecvs2,recv_waits,recv_statuses);CHKERRQ(ierr);
850     ierr = PetscFree(recv_statuses);CHKERRQ(ierr);
851   }
852   ierr = PetscFree(recv_waits);CHKERRQ(ierr);
853   ierr = PetscFree(nprocs);CHKERRQ(ierr);
854 
855   if (debug) { /* -----------------------------------  */
856     cnt = 0;
857     for (i=0; i<nrecvs2; i++) {
858       nt = recvs2[cnt++];
859       for (j=0; j<nt; j++) {
860         ierr = PetscSynchronizedPrintf(comm,"[%d] local node %d number of subdomains %d: ",rank,recvs2[cnt],recvs2[cnt+1]);CHKERRQ(ierr);
861         for (k=0; k<recvs2[cnt+1]; k++) {
862           ierr = PetscSynchronizedPrintf(comm,"%d ",recvs2[cnt+2+k]);CHKERRQ(ierr);
863         }
864         cnt += 2 + recvs2[cnt+1];
865         ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
866       }
867     }
868     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
869   } /* -----------------------------------  */
870 
871   /* count number subdomains for each local node */
872   ierr = PetscMalloc(size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
873   ierr = PetscMemzero(nprocs,size*sizeof(PetscInt));CHKERRQ(ierr);
874   cnt  = 0;
875   for (i=0; i<nrecvs2; i++) {
876     nt = recvs2[cnt++];
877     for (j=0; j<nt; j++) {
878       for (k=0; k<recvs2[cnt+1]; k++) {
879         nprocs[recvs2[cnt+2+k]]++;
880       }
881       cnt += 2 + recvs2[cnt+1];
882     }
883   }
884   nt = 0; for (i=0; i<size; i++) nt += (nprocs[i] > 0);
885   *nproc    = nt;
886   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),procs);CHKERRQ(ierr);
887   ierr = PetscMalloc((nt+1)*sizeof(PetscInt),numprocs);CHKERRQ(ierr);
888   ierr = PetscMalloc((nt+1)*sizeof(PetscInt*),indices);CHKERRQ(ierr);
889   for (i=0;i<nt+1;i++) { (*indices)[i]=PETSC_NULL; }
890   ierr = PetscMalloc(size*sizeof(PetscInt),&bprocs);CHKERRQ(ierr);
891   cnt       = 0;
892   for (i=0; i<size; i++) {
893     if (nprocs[i] > 0) {
894       bprocs[i]        = cnt;
895       (*procs)[cnt]    = i;
896       (*numprocs)[cnt] = nprocs[i];
897       ierr             = PetscMalloc(nprocs[i]*sizeof(PetscInt),&(*indices)[cnt]);CHKERRQ(ierr);
898       cnt++;
899     }
900   }
901 
902   /* make the list of subdomains for each nontrivial local node */
903   ierr = PetscMemzero(*numprocs,nt*sizeof(PetscInt));CHKERRQ(ierr);
904   cnt  = 0;
905   for (i=0; i<nrecvs2; i++) {
906     nt = recvs2[cnt++];
907     for (j=0; j<nt; j++) {
908       for (k=0; k<recvs2[cnt+1]; k++) {
909         (*indices)[bprocs[recvs2[cnt+2+k]]][(*numprocs)[bprocs[recvs2[cnt+2+k]]]++] = recvs2[cnt];
910       }
911       cnt += 2 + recvs2[cnt+1];
912     }
913   }
914   ierr = PetscFree(bprocs);CHKERRQ(ierr);
915   ierr = PetscFree(recvs2);CHKERRQ(ierr);
916 
917   /* sort the node indexing by their global numbers */
918   nt = *nproc;
919   for (i=0; i<nt; i++) {
920     ierr = PetscMalloc(((*numprocs)[i])*sizeof(PetscInt),&tmp);CHKERRQ(ierr);
921     for (j=0; j<(*numprocs)[i]; j++) {
922       tmp[j] = lindices[(*indices)[i][j]];
923     }
924     ierr = PetscSortIntWithArray((*numprocs)[i],tmp,(*indices)[i]);CHKERRQ(ierr);
925     ierr = PetscFree(tmp);CHKERRQ(ierr);
926   }
927 
928   if (debug) { /* -----------------------------------  */
929     nt = *nproc;
930     for (i=0; i<nt; i++) {
931       ierr = PetscSynchronizedPrintf(comm,"[%d] subdomain %d number of indices %d: ",rank,(*procs)[i],(*numprocs)[i]);CHKERRQ(ierr);
932       for (j=0; j<(*numprocs)[i]; j++) {
933         ierr = PetscSynchronizedPrintf(comm,"%d ",(*indices)[i][j]);CHKERRQ(ierr);
934       }
935       ierr = PetscSynchronizedPrintf(comm,"\n");CHKERRQ(ierr);
936     }
937     ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
938   } /* -----------------------------------  */
939 
940   /* wait on sends */
941   if (nsends2) {
942     ierr = PetscMalloc(nsends2*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
943     ierr = MPI_Waitall(nsends2,send_waits,send_status);CHKERRQ(ierr);
944     ierr = PetscFree(send_status);CHKERRQ(ierr);
945   }
946 
947   ierr = PetscFree(starts3);CHKERRQ(ierr);
948   ierr = PetscFree(dest);CHKERRQ(ierr);
949   ierr = PetscFree(send_waits);CHKERRQ(ierr);
950 
951   ierr = PetscFree(nownedsenders);CHKERRQ(ierr);
952   ierr = PetscFree(ownedsenders);CHKERRQ(ierr);
953   ierr = PetscFree(starts);CHKERRQ(ierr);
954   ierr = PetscFree(starts2);CHKERRQ(ierr);
955   ierr = PetscFree(lens2);CHKERRQ(ierr);
956 
957   ierr = PetscFree(source);CHKERRQ(ierr);
958   ierr = PetscFree(len);CHKERRQ(ierr);
959   ierr = PetscFree(recvs);CHKERRQ(ierr);
960   ierr = PetscFree(nprocs);CHKERRQ(ierr);
961   ierr = PetscFree(sends2);CHKERRQ(ierr);
962 
963   /* put the information about myself as the first entry in the list */
964   first_procs    = (*procs)[0];
965   first_numprocs = (*numprocs)[0];
966   first_indices  = (*indices)[0];
967   for (i=0; i<*nproc; i++) {
968     if ((*procs)[i] == rank) {
969       (*procs)[0]    = (*procs)[i];
970       (*numprocs)[0] = (*numprocs)[i];
971       (*indices)[0]  = (*indices)[i];
972       (*procs)[i]    = first_procs;
973       (*numprocs)[i] = first_numprocs;
974       (*indices)[i]  = first_indices;
975       break;
976     }
977   }
978   PetscFunctionReturn(0);
979 }
980 
981 #undef __FUNCT__
982 #define __FUNCT__ "ISLocalToGlobalMappingRestoreInfo"
983 /*@C
984     ISLocalToGlobalMappingRestoreInfo - Frees the memory allocated by ISLocalToGlobalMappingGetInfo()
985 
986     Collective on ISLocalToGlobalMapping
987 
988     Input Parameters:
989 .   mapping - the mapping from local to global indexing
990 
991     Output Parameter:
992 +   nproc - number of processors that are connected to this one
993 .   proc - neighboring processors
994 .   numproc - number of indices for each processor
995 -   indices - indices of local nodes shared with neighbor (sorted by global numbering)
996 
997     Level: advanced
998 
999 .seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreateIS(), ISLocalToGlobalMappingCreate(),
1000           ISLocalToGlobalMappingGetInfo()
1001 @*/
1002 PetscErrorCode  ISLocalToGlobalMappingRestoreInfo(ISLocalToGlobalMapping mapping,PetscInt *nproc,PetscInt *procs[],PetscInt *numprocs[],PetscInt **indices[])
1003 {
1004   PetscErrorCode ierr;
1005   PetscInt       i;
1006 
1007   PetscFunctionBegin;
1008   ierr = PetscFree(*procs);CHKERRQ(ierr);
1009   ierr = PetscFree(*numprocs);CHKERRQ(ierr);
1010   if (*indices) {
1011     ierr = PetscFree((*indices)[0]);CHKERRQ(ierr);
1012     for (i=1; i<*nproc; i++) {
1013       ierr = PetscFree((*indices)[i]);CHKERRQ(ierr);
1014     }
1015     ierr = PetscFree(*indices);CHKERRQ(ierr);
1016   }
1017   PetscFunctionReturn(0);
1018 }
1019 
1020 #undef __FUNCT__
1021 #define __FUNCT__ "ISLocalToGlobalMappingGetIndices"
1022 /*@C
1023    ISLocalToGlobalMappingGetIndices - Get global indices for every local point
1024 
1025    Not Collective
1026 
1027    Input Arguments:
1028 . ltog - local to global mapping
1029 
1030    Output Arguments:
1031 . array - array of indices
1032 
1033    Level: advanced
1034 
1035 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingRestoreIndices()
1036 @*/
PetscErrorCode  ISLocalToGlobalMappingGetIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Hand back a borrowed, read-only reference to the mapping's internal index
     array; no copy is made, so the caller must not free it and must return it
     via ISLocalToGlobalMappingRestoreIndices(). */
  *array = ltog->indices;
  PetscFunctionReturn(0);
}
1045 
1046 #undef __FUNCT__
1047 #define __FUNCT__ "ISLocalToGlobalMappingRestoreIndices"
1048 /*@C
   ISLocalToGlobalMappingRestoreIndices - Restore indices obtained with ISLocalToGlobalMappingGetIndices()
1050 
1051    Not Collective
1052 
1053    Input Arguments:
1054 + ltog - local to global mapping
1055 - array - array of indices
1056 
1057    Level: advanced
1058 
1059 .seealso: ISLocalToGlobalMappingCreate(), ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingGetIndices()
1060 @*/
PetscErrorCode  ISLocalToGlobalMappingRestoreIndices(ISLocalToGlobalMapping ltog,const PetscInt **array)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,1);
  PetscValidPointer(array,2);
  /* Guard against a caller handing back a pointer that did not come from
     ISLocalToGlobalMappingGetIndices() on this mapping. */
  if (*array != ltog->indices) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_BADPTR,"Trying to return mismatched pointer");
  /* NULL the caller's pointer so a stale reference cannot be used afterwards */
  *array = PETSC_NULL;
  PetscFunctionReturn(0);
}
1070 
1071 #undef __FUNCT__
1072 #define __FUNCT__ "ISLocalToGlobalMappingConcatenate"
1073 /*@C
1074    ISLocalToGlobalMappingConcatenate - Create a new mapping that concatenates a list of mappings
1075 
   Collective on comm (calls ISLocalToGlobalMappingCreate(), which is collective)
1077 
1078    Input Arguments:
1079 + comm - communicator for the new mapping, must contain the communicator of every mapping to concatenate
1080 . n - number of mappings to concatenate
1081 - ltogs - local to global mappings
1082 
1083    Output Arguments:
1084 . ltogcat - new mapping
1085 
1086    Level: advanced
1087 
1088 .seealso: ISLocalToGlobalMappingCreate()
1089 @*/
1090 PetscErrorCode ISLocalToGlobalMappingConcatenate(MPI_Comm comm,PetscInt n,const ISLocalToGlobalMapping ltogs[],ISLocalToGlobalMapping *ltogcat)
1091 {
1092   PetscInt       i,cnt,m,*idx;
1093   PetscErrorCode ierr;
1094 
1095   PetscFunctionBegin;
1096   if (n < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have a non-negative number of mappings, given %D",n);
1097   if (n > 0) PetscValidPointer(ltogs,3);
1098   for (i=0; i<n; i++) PetscValidHeaderSpecific(ltogs[i],IS_LTOGM_CLASSID,3);
1099   PetscValidPointer(ltogcat,4);
1100   for (cnt=0,i=0; i<n; i++) {
1101     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1102     cnt += m;
1103   }
1104   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1105   for (cnt=0,i=0; i<n; i++) {
1106     const PetscInt *subidx;
1107     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1108     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1109     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1110     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1111     cnt += m;
1112   }
1113   ierr = ISLocalToGlobalMappingCreate(comm,cnt,idx,PETSC_OWN_POINTER,ltogcat);CHKERRQ(ierr);
1114   PetscFunctionReturn(0);
1115 }
1116