xref: /petsc/src/vec/is/utils/pmap.c (revision 0e03b746557e2551025fde0294144c0532d12f68)
1 
2 /*
3    This file contains routines for basic map object implementation.
4 */
5 
6 #include <petscis.h> /*I "petscis.h" I*/
7 #include <petscsf.h>
8 #include <petsc/private/isimpl.h>
9 
10 /*@
11   PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.
12 
13   Collective
14 
15   Input Parameters:
16 . comm - the MPI communicator
17 
18   Output Parameters:
19 . map - the new PetscLayout
20 
21   Level: advanced
22 
23   Notes:
24   Typical calling sequence
25 .vb
26        PetscLayoutCreate(MPI_Comm,PetscLayout *);
27        PetscLayoutSetBlockSize(PetscLayout,bs);
28        PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
29        PetscLayoutSetUp(PetscLayout);
30 .ve
31   Alternatively,
32 $      PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
33 
34   Optionally use any of the following:
35 
36 + PetscLayoutGetSize(PetscLayout,PetscInt *);
37 . PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
38 . PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
39 . PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
40 - PetscLayoutDestroy(PetscLayout*);
41 
  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is often not needed in
43   user codes unless you really gain something in their use.
44 
45 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
46           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(),
47           PetscLayoutCreateFromSizes()
48 
49 @*/
50 PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
51 {
52   PetscErrorCode ierr;
53 
54   PetscFunctionBegin;
55   ierr = PetscNew(map);CHKERRQ(ierr);
56 
57   (*map)->comm        = comm;
58   (*map)->bs          = -1;
59   (*map)->n           = -1;
60   (*map)->N           = -1;
61   (*map)->range       = NULL;
62   (*map)->range_alloc = PETSC_TRUE;
63   (*map)->rstart      = 0;
64   (*map)->rend        = 0;
65   (*map)->setupcalled = PETSC_FALSE;
66   (*map)->oldn        = -1;
67   (*map)->oldN        = -1;
68   (*map)->oldbs       = -1;
69   PetscFunctionReturn(0);
70 }
71 
72 /*@
73   PetscLayoutCreateFromSizes - Allocates PetscLayout space, sets the layout sizes, and sets the layout up.
74 
75   Collective
76 
77   Input Parameters:
78 + comm  - the MPI communicator
79 . n     - the local size (or PETSC_DECIDE)
80 . N     - the global size (or PETSC_DECIDE)
81 - bs    - the block size (or PETSC_DECIDE)
82 
83   Output Parameters:
84 . map - the new PetscLayout
85 
86   Level: advanced
87 
88   Notes:
89 $ PetscLayoutCreateFromSizes(comm,n,N,bs,&layout);
90   is a shorthand for
91 .vb
92   PetscLayoutCreate(comm,&layout);
93   PetscLayoutSetLocalSize(layout,n);
94   PetscLayoutSetSize(layout,N);
95   PetscLayoutSetBlockSize(layout,bs);
96   PetscLayoutSetUp(layout);
97 .ve
98 
99 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
100           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromRanges()
101 
102 @*/
PetscErrorCode PetscLayoutCreateFromSizes(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt bs,PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Convenience wrapper: create the layout, set local/global/block sizes, and set it up in one call */
  ierr = PetscLayoutCreate(comm, map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(*map, n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(*map, N);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(*map, bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(*map);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
115 
116 /*@
117   PetscLayoutDestroy - Frees a map object and frees its range if that exists.
118 
119   Collective
120 
121   Input Parameters:
122 . map - the PetscLayout
123 
124   Level: developer
125 
126   Note:
  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
128   recommended they not be used in user codes unless you really gain something in their use.
129 
130 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
131           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()
132 
133 @*/
134 PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
135 {
136   PetscErrorCode ierr;
137 
138   PetscFunctionBegin;
139   if (!*map) PetscFunctionReturn(0);
140   if (!(*map)->refcnt--) {
141     if ((*map)->range_alloc) {ierr = PetscFree((*map)->range);CHKERRQ(ierr);}
142     ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
143     ierr = PetscFree((*map));CHKERRQ(ierr);
144   }
145   *map = NULL;
146   PetscFunctionReturn(0);
147 }
148 
149 /*@
150   PetscLayoutCreateFromRanges - Creates a new PetscLayout with the given ownership ranges and sets it up.
151 
152   Collective
153 
154   Input Parameters:
155 + comm  - the MPI communicator
156 . range - the array of ownership ranges for each rank with length commsize+1
157 . mode  - the copy mode for range
158 - bs    - the block size (or PETSC_DECIDE)
159 
160   Output Parameters:
161 . newmap - the new PetscLayout
162 
163   Level: developer
164 
165 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
166           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp(), PetscLayoutCreateFromSizes()
167 
168 @*/
PetscErrorCode PetscLayoutCreateFromRanges(MPI_Comm comm,const PetscInt range[],PetscCopyMode mode,PetscInt bs,PetscLayout *newmap)
{
  PetscLayout    map;
  PetscMPIInt    rank,size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &map);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map, bs);CHKERRQ(ierr);
  switch (mode) {
    case PETSC_COPY_VALUES:
      /* Take a private copy of the caller's size+1 entry range array */
      ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
      ierr = PetscArraycpy(map->range, range, size+1);CHKERRQ(ierr);
      break;
    case PETSC_USE_POINTER:
      /* Borrow the caller's array: mark it not-owned so PetscLayoutDestroy() will not free it */
      map->range_alloc = PETSC_FALSE;
      /* fall through: share the pointer exactly as PETSC_OWN_POINTER does below */
    default:
      /* PETSC_OWN_POINTER (and the fall-through above): adopt the pointer; range_alloc
         stays PETSC_TRUE for PETSC_OWN_POINTER so the layout frees it on destroy */
      map->range = (PetscInt*) range;
      break;
  }
  /* Derive this rank's ownership interval and the local/global sizes from the range array */
  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  map->n      = map->rend - map->rstart;
  map->N      = map->range[size];
#if defined(PETSC_USE_DEBUG)
  /* just check that n, N and bs are consistent */
  {
    PetscInt tmp;
    ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
  }
  if (map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }
#endif
  /* lock the layout: setupcalled plus the old* snapshot make a later PetscLayoutSetUp() a no-op
     (or an error if called with different sizes) */
  map->setupcalled = PETSC_TRUE;
  map->oldn = map->n;
  map->oldN = map->N;
  map->oldbs = map->bs;
  *newmap = map;
  PetscFunctionReturn(0);
}
217 
218 /*@
219   PetscLayoutSetUp - given a map where you have set either the global or local
220                      size sets up the map so that it may be used.
221 
222   Collective
223 
224   Input Parameters:
225 . map - pointer to the map
226 
227   Level: developer
228 
229   Notes:
230     Typical calling sequence
231 $ PetscLayoutCreate(MPI_Comm,PetscLayout *);
232 $ PetscLayoutSetBlockSize(PetscLayout,1);
$ PetscLayoutSetSize(PetscLayout,N) or PetscLayoutSetLocalSize(PetscLayout,n); or both
234 $ PetscLayoutSetUp(PetscLayout);
235 $ PetscLayoutGetSize(PetscLayout,PetscInt *);
236 
237   If range exists, and local size is not set, everything gets computed from the range.
238 
239   If the local size, global size are already set and range exists then this does nothing.
240 
241 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
242           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
243 @*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* A set-up layout is locked: calling again with different sizes is an error,
     calling again with the same sizes is a no-op */
  if (map->setupcalled && (map->n != map->oldn || map->N != map->oldN)) SETERRQ4(map->comm,PETSC_ERR_ARG_WRONGSTATE,"Layout is already setup with (local=%D,global=%D), cannot call setup again with (local=%D,global=%D)", map->oldn, map->oldN, map->n, map->N);
  if (map->setupcalled) PetscFunctionReturn(0);

  /* Any size that was explicitly set must contain a whole number of blocks */
  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  /* Split ownership in units of blocks; bs defaults to -1 when unset, so PetscAbs(bs) is 1 then */
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  /* Fills in whichever of n/N is PETSC_DECIDE so the local sizes sum to the global size */
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  /* Back to units of entries */
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  /* Gather every rank's local size into range[1..size], then prefix-sum so that
     range[p] is the first index owned by rank p and range[size] == N;
     starting the sum at p == 2 is correct because range[0] == 0 */
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];

  /* lock the layout */
  map->setupcalled = PETSC_TRUE;
  map->oldn = map->n;
  map->oldN = map->N;
  map->oldbs = map->bs;
  PetscFunctionReturn(0);
}
286 
287 /*@
288   PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the PetscLayout already exists it is destroyed first.
289 
290   Collective on PetscLayout
291 
292   Input Parameter:
293 . in - input PetscLayout to be duplicated
294 
295   Output Parameter:
296 . out - the copy
297 
298   Level: developer
299 
300   Notes:
301     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
302 
303 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
304 @*/
PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;
  MPI_Comm       comm = in->comm;

  PetscFunctionBegin;
  /* Release any layout already stored at *out, then build a fresh one on the same communicator */
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  /* Shallow-copy the whole struct, then deep-copy the range array so the copy owns its own.
     NOTE(review): the memcpy also copies the in->mapping pointer without a
     PetscObjectReference(); destroying both layouts would then drop two references
     against one mapping — confirm callers never duplicate a layout with a mapping attached */
  ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
  if (in->range) {
    ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
    ierr = PetscArraycpy((*out)->range,in->range,size+1);CHKERRQ(ierr);
  }

  /* The duplicate starts with its own reference count, independent of the original's */
  (*out)->refcnt = 0;
  PetscFunctionReturn(0);
}
324 
325 /*@
326   PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()
327 
328   Collective on PetscLayout
329 
330   Input Parameter:
331 . in - input PetscLayout to be copied
332 
333   Output Parameter:
334 . out - the reference location
335 
336   Level: developer
337 
338   Notes:
339     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
340 
341   If the out location already contains a PetscLayout it is destroyed
342 
343 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
344 @*/
PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Take the new reference before destroying *out: if *out == in, destroying
     first could free the very layout we are about to share */
  in->refcnt++;
  ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
  *out = in;
  PetscFunctionReturn(0);
}
355 
356 /*@
  PetscLayoutSetISLocalToGlobalMapping - sets an ISLocalToGlobalMapping into a PetscLayout
358 
359   Collective on PetscLayout
360 
361   Input Parameter:
362 + in - input PetscLayout
363 - ltog - the local to global mapping
364 
365 
366   Level: developer
367 
368   Notes:
369     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
370 
  If the PetscLayout already contains an ISLocalToGlobalMapping it is destroyed
372 
373 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
374 @*/
375 PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
376 {
377   PetscErrorCode ierr;
378   PetscInt       bs;
379 
380   PetscFunctionBegin;
381   ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
382   if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
383   ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
384   ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
385   in->mapping = ltog;
386   PetscFunctionReturn(0);
387 }
388 
389 /*@
390   PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.
391 
392   Collective on PetscLayout
393 
394   Input Parameters:
395 + map - pointer to the map
396 - n - the local size
397 
398   Level: developer
399 
400   Notes:
401   Call this after the call to PetscLayoutCreate()
402 
403 .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
404           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
405 @*/
406 PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
407 {
408   PetscFunctionBegin;
409   if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
410   map->n = n;
411   PetscFunctionReturn(0);
412 }
413 
414 /*@C
415      PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.
416 
417     Not Collective
418 
419    Input Parameters:
420 .    map - pointer to the map
421 
422    Output Parameters:
423 .    n - the local size
424 
425    Level: developer
426 
427     Notes:
428        Call this after the call to PetscLayoutSetUp()
429 
430     Fortran Notes:
431       Not available from Fortran
432 
433 .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
434           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
435 
436 @*/
PetscErrorCode  PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  /* Returns the stored local size; meaningful after PetscLayoutSetUp() (or after it was set) */
  *n = map->n;
  PetscFunctionReturn(0);
}
443 
444 /*@
445   PetscLayoutSetSize - Sets the global size for a PetscLayout object.
446 
447   Logically Collective on PetscLayout
448 
449   Input Parameters:
450 + map - pointer to the map
451 - n - the global size
452 
453   Level: developer
454 
455   Notes:
456   Call this after the call to PetscLayoutCreate()
457 
458 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
459           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
460 @*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  /* Stores the global size; consistency with the local sizes is checked in PetscLayoutSetUp() */
  map->N = n;
  PetscFunctionReturn(0);
}
467 
468 /*@
469   PetscLayoutGetSize - Gets the global size for a PetscLayout object.
470 
471   Not Collective
472 
473   Input Parameters:
474 . map - pointer to the map
475 
476   Output Parameters:
477 . n - the global size
478 
479   Level: developer
480 
481   Notes:
482   Call this after the call to PetscLayoutSetUp()
483 
484 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
485           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
486 @*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  /* Returns the stored global size; meaningful after PetscLayoutSetUp() (or after it was set) */
  *n = map->N;
  PetscFunctionReturn(0);
}
493 
494 /*@
495   PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.
496 
497   Logically Collective on PetscLayout
498 
499   Input Parameters:
500 + map - pointer to the map
501 - bs - the size
502 
503   Level: developer
504 
505   Notes:
506   Call this after the call to PetscLayoutCreate()
507 
508 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
509           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
510 @*/
511 PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
512 {
513   PetscFunctionBegin;
514   if (bs < 0) PetscFunctionReturn(0);
515   if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
516   if (map->mapping) {
517     PetscInt       obs;
518     PetscErrorCode ierr;
519 
520     ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
521     if (obs > 1) {
522       ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
523     }
524   }
525   map->bs = bs;
526   PetscFunctionReturn(0);
527 }
528 
529 /*@
530   PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.
531 
532   Not Collective
533 
534   Input Parameters:
535 . map - pointer to the map
536 
537   Output Parameters:
538 . bs - the size
539 
540   Level: developer
541 
542   Notes:
543   Call this after the call to PetscLayoutSetUp()
544 
545 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
546           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
547 @*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  PetscFunctionBegin;
  /* The internal default is -1 ("unset"); report its absolute value so callers see 1 */
  *bs = PetscAbs(map->bs);
  PetscFunctionReturn(0);
}
554 
555 /*@
556   PetscLayoutGetRange - gets the range of values owned by this process
557 
558   Not Collective
559 
560   Input Parameters:
561 . map - pointer to the map
562 
563   Output Parameters:
564 + rstart - first index owned by this process
565 - rend   - one more than the last index owned by this process
566 
567   Level: developer
568 
569   Notes:
570   Call this after the call to PetscLayoutSetUp()
571 
572 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
573           PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
574 @*/
PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
{
  PetscFunctionBegin;
  /* Either output may be NULL when the caller does not need it */
  if (rstart) *rstart = map->rstart;
  if (rend)   *rend   = map->rend;
  PetscFunctionReturn(0);
}
582 
583 /*@C
584      PetscLayoutGetRanges - gets the range of values owned by all processes
585 
586     Not Collective
587 
588    Input Parameters:
589 .    map - pointer to the map
590 
591    Output Parameters:
.    range - start of each processor's range of indices (the final entry is one more than the
593              last index on the last process)
594 
595    Level: developer
596 
597     Notes:
598        Call this after the call to PetscLayoutSetUp()
599 
600     Fortran Notes:
601       Not available from Fortran
602 
603 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
604           PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
605 
606 @*/
PetscErrorCode  PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  PetscFunctionBegin;
  /* Returns a borrowed pointer to the layout's internal range array (do not free);
     it is NULL until the layout has been set up or created from ranges */
  *range = map->range;
  PetscFunctionReturn(0);
}
613 
614 /*@C
615    PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout
616 
617    Collective
618 
619    Input Arguments:
620 +  sf - star forest
621 .  layout - PetscLayout defining the global space
622 .  nleaves - number of leaf vertices on the current process, each of these references a root on any process
623 .  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
624 .  localmode - copy mode for ilocal
625 -  iremote - remote locations of root vertices for each leaf on the current process
626 
627    Level: intermediate
628 
629    Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
630    encode contiguous storage. In such case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
631    needed
632 
633 .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
634 @*/
635 PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
636 {
637   PetscErrorCode ierr;
638   PetscInt       i,nroots;
639   PetscSFNode    *remote;
640 
641   PetscFunctionBegin;
642   ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
643   ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
644   for (i=0; i<nleaves; i++) {
645     PetscMPIInt owner = -1;
646     ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
647     remote[i].rank  = owner;
648     remote[i].index = iremote[i] - layout->range[owner];
649   }
650   ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
651   PetscFunctionReturn(0);
652 }
653 
654 /*@
655   PetscLayoutCompare - Compares two layouts
656 
657   Not Collective
658 
659   Input Parameters:
660 + mapa - pointer to the first map
661 - mapb - pointer to the second map
662 
663   Output Parameters:
664 . congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise
665 
666   Level: beginner
667 
668   Notes:
669 
670 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
671           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
672 @*/
673 PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
674 {
675   PetscErrorCode ierr;
676   PetscMPIInt    sizea,sizeb;
677 
678   PetscFunctionBegin;
679   *congruent = PETSC_FALSE;
680   ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
681   ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
682   if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
683     ierr = PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);CHKERRQ(ierr);
684   }
685   PetscFunctionReturn(0);
686 }
687 
688 /* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
/*
   PetscLayoutMapLocal - given N global indices idxs[] referenced on this rank, determines
   which of this rank's OWN indices (per the layout) are referenced by any rank.

   Outputs (each may be NULL): on = number of selected owned indices; oidxs = their local
   indices (malloc'd, caller frees); ogidxs = for each, a globally contiguous new numbering
   of the selected entries (malloc'd, caller frees).
*/
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank, p = 0;
  PetscInt       r, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  /* Root buffer: one slot per owned index, -1 meaning "not referenced by anyone" */
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* Reduce leaf values into the roots with logical OR: referenced roots end up >= 0
     (unreferenced ones keep the -1 sentinel), which the compression loop below tests */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* Exclusive prefix scan of per-rank counts gives this rank's starting offset in the
       new contiguous numbering of selected entries */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    /* Push each leaf's new global number to its owning root (REPLACE: last writer wins) */
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices: keep the local index r of every referenced root,
     compacting the parallel work[] array in lockstep */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
746