xref: /petsc/src/vec/is/utils/pmap.c (revision 9621ec185aeb287fa1ab4770f9eb19f72a96b4be)
1 
2 /*
3    This file contains routines for basic map object implementation.
4 */
5 
6 #include <petscis.h> /*I "petscis.h" I*/
7 #include <petscsf.h>
8 #include <petsc/private/isimpl.h>
9 
10 /*@
11   PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.
12 
13   Collective
14 
15   Input Parameters:
16 . comm - the MPI communicator
17 
18   Output Parameters:
19 . map - the new PetscLayout
20 
21   Level: advanced
22 
23   Notes:
24   Typical calling sequence
25 .vb
26        PetscLayoutCreate(MPI_Comm,PetscLayout *);
27        PetscLayoutSetBlockSize(PetscLayout,bs);
28        PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
29        PetscLayoutSetUp(PetscLayout);
30 .ve
31   Alternatively,
32 $      PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
33 
34   Optionally use any of the following:
35 
36 + PetscLayoutGetSize(PetscLayout,PetscInt *);
37 . PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
38 . PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
39 . PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
40 - PetscLayoutDestroy(PetscLayout*);
41 
42   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementions; it is often not needed in
43   user codes unless you really gain something in their use.
44 
45 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
46           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(),
47           PetscLayoutCreateFromSizes()
48 
49 @*/
50 PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
51 {
52   PetscErrorCode ierr;
53 
54   PetscFunctionBegin;
55   ierr = PetscNew(map);CHKERRQ(ierr);
56 
57   (*map)->comm        = comm;
58   (*map)->bs          = -1;
59   (*map)->n           = -1;
60   (*map)->N           = -1;
61   (*map)->range       = NULL;
62   (*map)->range_alloc = PETSC_TRUE;
63   (*map)->rstart      = 0;
64   (*map)->rend        = 0;
65   PetscFunctionReturn(0);
66 }
67 
68 /*@
69   PetscLayoutCreateFromSizes - Allocates PetscLayout space, sets the layout sizes, and sets the layout up.
70 
71   Collective
72 
73   Input Parameters:
74 + comm  - the MPI communicator
75 . n     - the local size (or PETSC_DECIDE)
76 . N     - the global size (or PETSC_DECIDE)
77 . bs    - the block size (or PETSC_DECIDE)
78 
79   Output Parameters:
80 . map - the new PetscLayout
81 
82   Level: advanced
83 
84   Notes:
85 $ PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
86   is a shorthand for
87 .vb
88   PetscLayoutCreate(comm,&layout);
89   PetscLayoutSetLocalSize(layout,n);
90   PetscLayoutSetSize(layout,N);
91   PetscLayoutSetBlockSize(layout,bs);
92   PetscLayoutSetUp(layout);
93 .ve
94 
95 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
96           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromRanges()
97 
98 @*/
99 PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt bs,PetscLayout *map)
100 {
101   PetscErrorCode ierr;
102 
103   PetscFunctionBegin;
104   ierr = PetscLayoutCreate(comm, map);CHKERRQ(ierr);
105   ierr = PetscLayoutSetLocalSize(*map, n);CHKERRQ(ierr);
106   ierr = PetscLayoutSetSize(*map, N);CHKERRQ(ierr);
107   ierr = PetscLayoutSetBlockSize(*map, bs);CHKERRQ(ierr);
108   ierr = PetscLayoutSetUp(*map);CHKERRQ(ierr);
109   PetscFunctionReturn(0);
110 }
111 
112 /*@
113   PetscLayoutDestroy - Frees a map object and frees its range if that exists.
114 
115   Collective
116 
117   Input Parameters:
118 . map - the PetscLayout
119 
120   Level: developer
121 
122   Note:
123   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementions; it is
124   recommended they not be used in user codes unless you really gain something in their use.
125 
126 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
127           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()
128 
129 @*/
130 PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
131 {
132   PetscErrorCode ierr;
133 
134   PetscFunctionBegin;
135   if (!*map) PetscFunctionReturn(0);
136   if (!(*map)->refcnt--) {
137     if ((*map)->range_alloc) {ierr = PetscFree((*map)->range);CHKERRQ(ierr);}
138     ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
139     ierr = PetscFree((*map));CHKERRQ(ierr);
140   }
141   *map = NULL;
142   PetscFunctionReturn(0);
143 }
144 
145 /*@
146   PetscLayoutCreateFromRanges - Creates a new PetscLayout with the given ownership ranges and sets it up.
147 
148   Collective
149 
150   Input Parameters:
151 + comm  - the MPI communicator
152 . range - the array of ownership ranges for each rank with length commsize+1
153 . mode  - the copy mode for range
154 - bs    - the block size (or PETSC_DECIDE)
155 
156   Output Parameters:
157 . newmap - the new PetscLayout
158 
159   Level: developer
160 
161 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
162           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromSizes()
163 
164 @*/
165 PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm comm,const PetscInt range[],PetscCopyMode mode,PetscInt bs,PetscLayout *newmap)
166 {
167   PetscLayout    map;
168   PetscMPIInt    rank,size;
169   PetscErrorCode ierr;
170 
171   PetscFunctionBegin;
172   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
173   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
174   ierr = PetscLayoutCreate(comm, &map);CHKERRQ(ierr);
175   ierr = PetscLayoutSetBlockSize(map, bs);CHKERRQ(ierr);
176   switch (mode) {
177     case PETSC_COPY_VALUES:
178       ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
179       ierr = PetscArraycpy(map->range, range, size+1);CHKERRQ(ierr);
180       break;
181     case PETSC_USE_POINTER:
182       map->range_alloc = PETSC_FALSE;
183     default:
184       map->range = (PetscInt*) range;
185       break;
186   }
187   map->rstart = map->range[rank];
188   map->rend   = map->range[rank+1];
189   map->n      = map->rend - map->rstart;
190   map->N      = map->range[size];
191 #if defined(PETSC_USE_DEBUG)
192   /* just check that n, N and bs are consistent */
193   {
194     PetscInt tmp;
195     ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
196     if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
197   }
198   if (map->bs > 1) {
199     if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
200   }
201   if (map->bs > 1) {
202     if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
203   }
204 #endif
205   *newmap = map;
206   PetscFunctionReturn(0);
207 }
208 
/*@
  PetscLayoutSetUp - given a map where you have set either the global or local
                     size sets up the map so that it may be used.

  Collective

  Input Parameters:
. map - pointer to the map

  Level: developer

  Notes:
    Typical calling sequence
$ PetscLayoutCreate(MPI_Comm,PetscLayout *);
$ PetscLayoutSetBlockSize(PetscLayout,1);
$ PetscLayoutSetSize(PetscLayout,n) or PetscLayoutSetLocalSize(PetscLayout,N); or both
$ PetscLayoutSetUp(PetscLayout);
$ PetscLayoutGetSize(PetscLayout,PetscInt *);

  If range exists, and local size is not set, everything gets computed from the range.

  If the local size, global size are already set and range exists then this does nothing.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
@*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* fully set up already (both sizes known and range built): nothing to do */
  if ((map->n >= 0) && (map->N >= 0) && (map->range)) PetscFunctionReturn(0);

  /* any caller-provided size must be a multiple of the block size */
  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  /* split ownership in units of blocks; bs defaults to -1, so PetscAbs(bs) is 1 when unset */
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  /* convert block counts back to index counts */
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  /* gather every rank's local size into range[1..size] ... */
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  /* ... then turn the counts into a prefix sum of starting offsets */
  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  PetscFunctionReturn(0);
}
270 
271 /*@
272   PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the PetscLayout already exists it is destroyed first.
273 
274   Collective on PetscLayout
275 
276   Input Parameter:
277 . in - input PetscLayout to be duplicated
278 
279   Output Parameter:
280 . out - the copy
281 
282   Level: developer
283 
284   Notes:
285     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
286 
287 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
288 @*/
289 PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
290 {
291   PetscMPIInt    size;
292   PetscErrorCode ierr;
293   MPI_Comm       comm = in->comm;
294 
295   PetscFunctionBegin;
296   ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
297   ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
298   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
299   ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
300   if (in->range) {
301     ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
302     ierr = PetscArraycpy((*out)->range,in->range,size+1);CHKERRQ(ierr);
303   }
304 
305   (*out)->refcnt = 0;
306   PetscFunctionReturn(0);
307 }
308 
/*@
  PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()

  Collective on PetscLayout

  Input Parameter:
. in - input PetscLayout to be copied

  Output Parameter:
. out - the reference location

  Level: developer

  Notes:
    PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the out location already contains a PetscLayout it is destroyed

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* bump the count before destroying *out: keeps 'in' alive even if *out == in */
  in->refcnt++;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  *out = in;
  PetscFunctionReturn(0);
}
339 
340 /*@
341   PetscLayoutSetISLocalToGlobalMapping - sets a ISLocalGlobalMapping into a PetscLayout
342 
343   Collective on PetscLayout
344 
345   Input Parameter:
346 + in - input PetscLayout
347 - ltog - the local to global mapping
348 
349 
350   Level: developer
351 
352   Notes:
353     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
354 
355   If the ltog location already contains a PetscLayout it is destroyed
356 
357 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
358 @*/
359 PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
360 {
361   PetscErrorCode ierr;
362   PetscInt       bs;
363 
364   PetscFunctionBegin;
365   ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
366   if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
367   ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
368   ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
369   in->mapping = ltog;
370   PetscFunctionReturn(0);
371 }
372 
/*@
  PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.

  Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n - the local size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  /* if a block size has already been set, the local size must be a multiple of it */
  if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
  map->n = n;
  PetscFunctionReturn(0);
}
397 
/*@C
     PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.

    Not Collective

   Input Parameters:
.    map - pointer to the map

   Output Parameters:
.    n - the local size

   Level: developer

    Notes:
       Call this after the call to PetscLayoutSetUp()

    Fortran Notes:
      Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()

@*/
PetscErrorCode  PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  /* simple accessor: returns whatever is stored, -1 if the size has not been set/computed yet */
  *n = map->n;
  PetscFunctionReturn(0);
}
427 
/*@
  PetscLayoutSetSize - Sets the global size for a PetscLayout object.

  Logically Collective on PetscLayout

  Input Parameters:
+ map - pointer to the map
- n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutCreate()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  /* no block-size compatibility check here; consistency is verified in PetscLayoutSetUp() */
  map->N = n;
  PetscFunctionReturn(0);
}
451 
/*@
  PetscLayoutGetSize - Gets the global size for a PetscLayout object.

  Not Collective

  Input Parameters:
. map - pointer to the map

  Output Parameters:
. n - the global size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
@*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  /* simple accessor: returns whatever is stored, -1 if the size has not been set/computed yet */
  *n = map->N;
  PetscFunctionReturn(0);
}
477 
478 /*@
479   PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.
480 
481   Logically Collective on PetscLayout
482 
483   Input Parameters:
484 + map - pointer to the map
485 - bs - the size
486 
487   Level: developer
488 
489   Notes:
490   Call this after the call to PetscLayoutCreate()
491 
492 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
493           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
494 @*/
495 PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
496 {
497   PetscFunctionBegin;
498   if (bs < 0) PetscFunctionReturn(0);
499   if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
500   if (map->mapping) {
501     PetscInt       obs;
502     PetscErrorCode ierr;
503 
504     ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
505     if (obs > 1) {
506       ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
507     }
508   }
509   map->bs = bs;
510   PetscFunctionReturn(0);
511 }
512 
/*@
  PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.

  Not Collective

  Input Parameters:
. map - pointer to the map

  Output Parameters:
. bs - the size

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
@*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  PetscFunctionBegin;
  /* the stored bs is -1 when never set; the absolute value reports that as 1 */
  *bs = PetscAbs(map->bs);
  PetscFunctionReturn(0);
}
538 
/*@
  PetscLayoutGetRange - gets the range of values owned by this process

  Not Collective

  Input Parameters:
. map - pointer to the map

  Output Parameters:
+ rstart - first index owned by this process
- rend   - one more than the last index owned by this process

  Level: developer

  Notes:
  Call this after the call to PetscLayoutSetUp()

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
{
  PetscFunctionBegin;
  /* either output may be NULL if the caller does not need it */
  if (rstart) *rstart = map->rstart;
  if (rend)   *rend   = map->rend;
  PetscFunctionReturn(0);
}
566 
/*@C
     PetscLayoutGetRanges - gets the range of values owned by all processes

    Not Collective

   Input Parameters:
.    map - pointer to the map

   Output Parameters:
.    range - start of each processors range of indices (the final entry is one more then the
             last index on the last process)

   Level: developer

    Notes:
       Call this after the call to PetscLayoutSetUp()

       The returned array is owned by the layout; do not free or modify it.

    Fortran Notes:
      Not available from Fortran

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
          PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()

@*/
PetscErrorCode  PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  PetscFunctionBegin;
  /* hands out the internal array directly; NULL if PetscLayoutSetUp() has not run */
  *range = map->range;
  PetscFunctionReturn(0);
}
597 
598 /*@C
599    PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout
600 
601    Collective
602 
603    Input Arguments:
604 +  sf - star forest
605 .  layout - PetscLayout defining the global space
606 .  nleaves - number of leaf vertices on the current process, each of these references a root on any process
607 .  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
608 .  localmode - copy mode for ilocal
609 -  iremote - remote locations of root vertices for each leaf on the current process
610 
611    Level: intermediate
612 
613    Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
614    encode contiguous storage. In such case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
615    needed
616 
617 .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
618 @*/
619 PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
620 {
621   PetscErrorCode ierr;
622   PetscInt       i,nroots;
623   PetscSFNode    *remote;
624 
625   PetscFunctionBegin;
626   ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
627   ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
628   for (i=0; i<nleaves; i++) {
629     PetscInt owner = -1;
630     ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
631     remote[i].rank  = owner;
632     remote[i].index = iremote[i] - layout->range[owner];
633   }
634   ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
635   PetscFunctionReturn(0);
636 }
637 
/*@
  PetscLayoutCompare - Compares two layouts

  Not Collective

  Input Parameters:
+ mapa - pointer to the first map
- mapb - pointer to the second map

  Output Parameters:
. congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise

  Level: beginner

  Notes:
  Congruent means: same global size, same communicator size, and identical ownership ranges.
  Block sizes are not compared.

.seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
@*/
PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
{
  PetscErrorCode ierr;
  PetscMPIInt    sizea,sizeb;

  PetscFunctionBegin;
  *congruent = PETSC_FALSE;
  ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
  ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
  /* only compare element-by-element when both ranges exist and are the same length */
  if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
    ierr = PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
671 
/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
/*
   PetscLayoutMapLocal - Given N global indices idxs[] (possibly owned by other ranks),
   determine which locally owned indices of map were referenced by any rank.

   Outputs (each optional):
+  on     - number of locally owned indices that were referenced
.  oidxs  - those indices in local numbering, allocated here (caller frees)
-  ogidxs - for each of them, a new global number taken from a contiguous renumbering
            of all selected entries across ranks, allocated here (caller frees)
*/
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks an owned index nobody referenced; the reduction below overwrites touched slots */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* leaf values are the (non-negative) global indices themselves, so after the logical-or
     reduction an owned slot is >= 0 iff at least one rank referenced it */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* number the selected leaves contiguously across ranks via an exclusive scan */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    /* push each leaf's new global number to its owning root */
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
730