xref: /petsc/src/vec/is/utils/pmap.c (revision c168f6d8e6d113b97645dfd96fa277c9c2372c3a)
1 
2 /*
3    This file contains routines for basic map object implementation.
4 */
5 
6 #include <petscis.h> /*I "petscis.h" I*/
7 #include <petscsf.h>
8 #include <petsc/private/isimpl.h>
9 
10 /*@
11   PetscLayoutCreate - Allocates PetscLayout space and sets the PetscLayout contents to the default.
12 
13   Collective
14 
15   Input Parameters:
16 . comm - the MPI communicator
17 
18   Output Parameters:
19 . map - the new PetscLayout
20 
21   Level: advanced
22 
23   Notes:
24   Typical calling sequence
25 .vb
26        PetscLayoutCreate(MPI_Comm,PetscLayout *);
27        PetscLayoutSetBlockSize(PetscLayout,bs);
28        PetscLayoutSetSize(PetscLayout,N); // or PetscLayoutSetLocalSize(PetscLayout,n);
29        PetscLayoutSetUp(PetscLayout);
30 .ve
31   Optionally use any of the following:
32 
33 + PetscLayoutGetSize(PetscLayout,PetscInt *);
34 . PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
35 . PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
36 . PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
37 - PetscLayoutDestroy(PetscLayout*);
38 
39   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is often not needed in
40   user codes unless you really gain something in their use.
41 
42 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
43           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()
44 
45 @*/
46 PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
47 {
48   PetscErrorCode ierr;
49 
50   PetscFunctionBegin;
51   ierr = PetscNew(map);CHKERRQ(ierr);
52 
53   (*map)->comm   = comm;
54   (*map)->bs     = -1;
55   (*map)->n      = -1;
56   (*map)->N      = -1;
57   (*map)->range  = NULL;
58   (*map)->rstart = 0;
59   (*map)->rend   = 0;
60   PetscFunctionReturn(0);
61 }
62 
63 /*@
64   PetscLayoutDestroy - Frees a map object and frees its range if that exists.
65 
66   Collective
67 
68   Input Parameters:
69 . map - the PetscLayout
70 
71   Level: developer
72 
73   Note:
74   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
75   recommended they not be used in user codes unless you really gain something in their use.
76 
77 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
78           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()
79 
80 @*/
81 PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
82 {
83   PetscErrorCode ierr;
84 
85   PetscFunctionBegin;
86   if (!*map) PetscFunctionReturn(0);
87   if (!(*map)->refcnt--) {
88     ierr = PetscFree((*map)->range);CHKERRQ(ierr);
89     ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
90     ierr = PetscFree((*map));CHKERRQ(ierr);
91   }
92   *map = NULL;
93   PetscFunctionReturn(0);
94 }
95 
96 static PetscErrorCode PetscLayoutSetUp_SizesFromRanges_Private(PetscLayout map)
97 {
98   PetscMPIInt    rank,size;
99   PetscErrorCode ierr;
100 
101   PetscFunctionBegin;
102   ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
103   ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
104   map->rstart = map->range[rank];
105   map->rend   = map->range[rank+1];
106   map->n      = map->rend - map->rstart;
107   map->N      = map->range[size];
108 #if defined(PETSC_USE_DEBUG)
109   /* just check that n, N and bs are consistent */
110   {
111     PetscInt tmp;
112     ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
113     if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
114   }
115   if (map->bs > 1) {
116     if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
117   }
118   if (map->bs > 1) {
119     if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
120   }
121 #endif
122   PetscFunctionReturn(0);
123 }
124 
125 /*@
126   PetscLayoutSetUp - given a map where you have set either the global or local
127                      size sets up the map so that it may be used.
128 
129   Collective
130 
131   Input Parameters:
132 . map - pointer to the map
133 
134   Level: developer
135 
136   Notes:
137     Typical calling sequence
138 $ PetscLayoutCreate(MPI_Comm,PetscLayout *);
139 $ PetscLayoutSetBlockSize(PetscLayout,1);
140 $ PetscLayoutSetSize(PetscLayout,n) or PetscLayoutSetLocalSize(PetscLayout,N); or both
141 $ PetscLayoutSetUp(PetscLayout);
142 $ PetscLayoutGetSize(PetscLayout,PetscInt *);
143 
144   If range exists, and local size is not set, everything gets computed from the range.
145 
146   If the local size, global size are already set and range exists then this does nothing.
147 
148 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
149           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
150 @*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Already fully set up (local size, global size, and range all present): nothing to do */
  if ((map->n >= 0) && (map->N >= 0) && (map->range)) PetscFunctionReturn(0);
  /* A range array was supplied but the local size was not: derive everything from the range */
  if (map->range && map->n < 0) {
    ierr = PetscLayoutSetUp_SizesFromRanges_Private(map);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* Whatever sizes have been set must be divisible by the block size */
  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  /* Split ownership in units of blocks; PetscAbs because bs is -1 when unset */
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  /* Convert block counts back to entry counts */
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  /* Gather every process's local size into range[1..size], then prefix-sum
     so range[p] becomes the first global index owned by process p */
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  PetscFunctionReturn(0);
}
190 
191 /*@
192   PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the PetscLayout already exists it is destroyed first.
193 
194   Collective on PetscLayout
195 
196   Input Parameter:
197 . in - input PetscLayout to be duplicated
198 
199   Output Parameter:
200 . out - the copy
201 
202   Level: developer
203 
204   Notes:
205     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
206 
207 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
208 @*/
209 PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
210 {
211   PetscMPIInt    size;
212   PetscErrorCode ierr;
213   MPI_Comm       comm = in->comm;
214 
215   PetscFunctionBegin;
216   ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
217   ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
218   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
219   ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
220   if (in->range) {
221     ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
222     ierr = PetscArraycpy((*out)->range,in->range,size+1);CHKERRQ(ierr);
223   }
224 
225   (*out)->refcnt = 0;
226   PetscFunctionReturn(0);
227 }
228 
229 /*@
230   PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()
231 
232   Collective on PetscLayout
233 
234   Input Parameter:
235 . in - input PetscLayout to be copied
236 
237   Output Parameter:
238 . out - the reference location
239 
240   Level: developer
241 
242   Notes:
243     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
244 
245   If the out location already contains a PetscLayout it is destroyed
246 
247 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
248 @*/
249 PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
250 {
251   PetscErrorCode ierr;
252 
253   PetscFunctionBegin;
254   in->refcnt++;
255   ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
256   *out = in;
257   PetscFunctionReturn(0);
258 }
259 
260 /*@
261   PetscLayoutSetISLocalToGlobalMapping - sets a ISLocalToGlobalMapping into a PetscLayout
262 
263   Collective on PetscLayout
264 
265   Input Parameter:
266 + in - input PetscLayout
267 - ltog - the local to global mapping
268 
269 
270   Level: developer
271 
272   Notes:
273     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
274 
275   If the PetscLayout already contains an ISLocalToGlobalMapping it is destroyed
276 
277 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
278 @*/
279 PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
280 {
281   PetscErrorCode ierr;
282   PetscInt       bs;
283 
284   PetscFunctionBegin;
285   ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
286   if (in->bs > 0 && (bs != 1) && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D (or the latter must be 1)",in->bs,bs);
287   ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
288   ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
289   in->mapping = ltog;
290   PetscFunctionReturn(0);
291 }
292 
293 /*@
294   PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.
295 
296   Collective on PetscLayout
297 
298   Input Parameters:
299 + map - pointer to the map
300 - n - the local size
301 
302   Level: developer
303 
304   Notes:
305   Call this after the call to PetscLayoutCreate()
306 
307 .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
308           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
309 @*/
310 PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
311 {
312   PetscFunctionBegin;
313   if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
314   map->n = n;
315   PetscFunctionReturn(0);
316 }
317 
318 /*@C
319      PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.
320 
321     Not Collective
322 
323    Input Parameters:
324 .    map - pointer to the map
325 
326    Output Parameters:
327 .    n - the local size
328 
329    Level: developer
330 
331     Notes:
332        Call this after the call to PetscLayoutSetUp()
333 
334     Fortran Notes:
335       Not available from Fortran
336 
337 .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
338           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
339 
340 @*/
PetscErrorCode  PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  /* Number of entries owned by this process; meaningful after PetscLayoutSetUp() */
  *n = map->n;
  PetscFunctionReturn(0);
}
347 
348 /*@
349   PetscLayoutSetSize - Sets the global size for a PetscLayout object.
350 
351   Logically Collective on PetscLayout
352 
353   Input Parameters:
354 + map - pointer to the map
355 - n - the global size
356 
357   Level: developer
358 
359   Notes:
360   Call this after the call to PetscLayoutCreate()
361 
362 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
363           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
364 @*/
PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
{
  PetscFunctionBegin;
  /* Record the requested global size; divisibility by the block size is
     checked later in PetscLayoutSetUp() */
  map->N = n;
  PetscFunctionReturn(0);
}
371 
372 /*@
373   PetscLayoutGetSize - Gets the global size for a PetscLayout object.
374 
375   Not Collective
376 
377   Input Parameters:
378 . map - pointer to the map
379 
380   Output Parameters:
381 . n - the global size
382 
383   Level: developer
384 
385   Notes:
386   Call this after the call to PetscLayoutSetUp()
387 
388 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
389           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
390 @*/
PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
{
  PetscFunctionBegin;
  /* Global number of entries; meaningful after PetscLayoutSetUp() */
  *n = map->N;
  PetscFunctionReturn(0);
}
397 
398 /*@
399   PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.
400 
401   Logically Collective on PetscLayout
402 
403   Input Parameters:
404 + map - pointer to the map
405 - bs - the size
406 
407   Level: developer
408 
409   Notes:
410   Call this after the call to PetscLayoutCreate()
411 
412 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
413           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
414 @*/
415 PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
416 {
417   PetscFunctionBegin;
418   if (bs < 0) PetscFunctionReturn(0);
419   if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
420   if (map->mapping) {
421     PetscInt       obs;
422     PetscErrorCode ierr;
423 
424     ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
425     if (obs > 1) {
426       ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
427     }
428   }
429   map->bs = bs;
430   PetscFunctionReturn(0);
431 }
432 
433 /*@
434   PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.
435 
436   Not Collective
437 
438   Input Parameters:
439 . map - pointer to the map
440 
441   Output Parameters:
442 . bs - the size
443 
444   Level: developer
445 
446   Notes:
447   Call this after the call to PetscLayoutSetUp()
448 
449 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
450           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
451 @*/
PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
{
  PetscFunctionBegin;
  /* bs is stored as -1 while unset (see PetscLayoutCreate); report a usable
     positive value in that case */
  *bs = PetscAbs(map->bs);
  PetscFunctionReturn(0);
}
458 
459 /*@
460   PetscLayoutGetRange - gets the range of values owned by this process
461 
462   Not Collective
463 
464   Input Parameters:
465 . map - pointer to the map
466 
467   Output Parameters:
468 + rstart - first index owned by this process
469 - rend   - one more than the last index owned by this process
470 
471   Level: developer
472 
473   Notes:
474   Call this after the call to PetscLayoutSetUp()
475 
476 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
477           PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
478 @*/
479 PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
480 {
481   PetscFunctionBegin;
482   if (rstart) *rstart = map->rstart;
483   if (rend)   *rend   = map->rend;
484   PetscFunctionReturn(0);
485 }
486 
487 /*@C
488      PetscLayoutGetRanges - gets the range of values owned by all processes
489 
490     Not Collective
491 
492    Input Parameters:
493 .    map - pointer to the map
494 
495    Output Parameters:
496 .    range - start of each processors range of indices (the final entry is one more than the
497              last index on the last process)
498 
499    Level: developer
500 
501     Notes:
502        Call this after the call to PetscLayoutSetUp()
503 
504     Fortran Notes:
505       Not available from Fortran
506 
507 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
508           PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
509 
510 @*/
PetscErrorCode  PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
{
  PetscFunctionBegin;
  /* Exposes the internal ownership array (size+1 entries, built by
     PetscLayoutSetUp()); this is not a copy — the caller must not free or
     modify it */
  *range = map->range;
  PetscFunctionReturn(0);
}
517 
518 /*@C
519    PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout
520 
521    Collective
522 
523    Input Arguments:
524 +  sf - star forest
525 .  layout - PetscLayout defining the global space
526 .  nleaves - number of leaf vertices on the current process, each of these references a root on any process
527 .  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
528 .  localmode - copy mode for ilocal
529 -  iremote - remote locations of root vertices for each leaf on the current process
530 
531    Level: intermediate
532 
533    Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
534    encode contiguous storage. In such case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
535    needed
536 
537 .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
538 @*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots;
  PetscSFNode    *remote;

  PetscFunctionBegin;
  /* Roots on this process are exactly the indices the layout owns locally */
  ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    PetscInt owner = -1;
    /* Translate global index iremote[i] into (owning rank, offset on that rank) */
    ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
    remote[i].rank  = owner;
    remote[i].index = iremote[i] - layout->range[owner];
  }
  /* remote was allocated here, so ownership passes to the SF (PETSC_OWN_POINTER);
     ilocal is handed over under the caller-specified localmode */
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
557 
558 /*@
559   PetscLayoutCompare - Compares two layouts
560 
561   Not Collective
562 
563   Input Parameters:
564 + mapa - pointer to the first map
565 - mapb - pointer to the second map
566 
567   Output Parameters:
568 . congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise
569 
570   Level: beginner
571 
572   Notes:
573 
574 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
575           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
576 @*/
577 PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
578 {
579   PetscErrorCode ierr;
580   PetscMPIInt    sizea,sizeb;
581 
582   PetscFunctionBegin;
583   *congruent = PETSC_FALSE;
584   ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
585   ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
586   if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
587     ierr = PetscArraycmp(mapa->range,mapb->range,sizea+1,congruent);CHKERRQ(ierr);
588   }
589   PetscFunctionReturn(0);
590 }
591 
/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
/*
   PetscLayoutMapLocal - given N global indices (idxs), determines which locally
   owned indices of the layout were selected by any process.

   Outputs (each optional, pass NULL to skip):
     on     - number of locally owned indices selected
     oidxs  - newly allocated array of those local indices (ownership transfers
              to the caller)
     ogidxs - newly allocated array assigning each entry of oidxs a globally
              contiguous number, computed by an MPI_Scan prefix sum (ownership
              transfers to the caller)
*/
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks a locally owned index that no process selected */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* Reduce the leaf indices onto their owners; owned slots hit by any leaf
     become >= 0 (MPI_LOR against the -1 initialization) */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* Exclusive prefix sum over processes of the number of selected entries,
       giving each selected entry a globally contiguous output number */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
650