xref: /petsc/src/vec/is/utils/pmap.c (revision 45960306a3da3eb5dbb9ac58613892bf697e3cde)
1 
2 /*
3    This file contains routines for basic map object implementation.
4 */
5 
6 #include <petscis.h> /*I "petscis.h" I*/
7 #include <petscsf.h>
8 #include <petsc/private/vecimpl.h>
9 
10 /*@
11   PetscLayoutCreate - Allocates PetscLayout space and sets the map contents to the default.
12 
13   Collective on MPI_Comm
14 
15   Input Parameters:
16 + comm - the MPI communicator
17 - map - pointer to the map
18 
19   Level: advanced
20 
21   Notes:
22   Typical calling sequence
23 .vb
24        PetscLayoutCreate(MPI_Comm,PetscLayout *);
25        PetscLayoutSetBlockSize(PetscLayout,1);
26        PetscLayoutSetSize(PetscLayout,N) // or PetscLayoutSetLocalSize(PetscLayout,n);
27        PetscLayoutSetUp(PetscLayout);
28 .ve
29   Optionally use any of the following:
30 
31 + PetscLayoutGetSize(PetscLayout,PetscInt *);
32 . PetscLayoutGetLocalSize(PetscLayout,PetscInt *);
33 . PetscLayoutGetRange(PetscLayout,PetscInt *rstart,PetscInt *rend);
34 . PetscLayoutGetRanges(PetscLayout,const PetscInt *range[]);
35 - PetscLayoutDestroy(PetscLayout*);
36 
37   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is often not needed in
38   user codes unless you really gain something in their use.
39 
40 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
41           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()
42 
43 @*/
44 PetscErrorCode PetscLayoutCreate(MPI_Comm comm,PetscLayout *map)
45 {
46   PetscErrorCode ierr;
47 
48   PetscFunctionBegin;
49   ierr = PetscNew(map);CHKERRQ(ierr);
50 
51   (*map)->comm   = comm;
52   (*map)->bs     = -1;
53   (*map)->n      = -1;
54   (*map)->N      = -1;
55   (*map)->range  = NULL;
56   (*map)->rstart = 0;
57   (*map)->rend   = 0;
58   PetscFunctionReturn(0);
59 }
60 
61 /*@
62   PetscLayoutDestroy - Frees a map object and frees its range if that exists.
63 
64   Collective on MPI_Comm
65 
66   Input Parameters:
67 . map - the PetscLayout
68 
69   Level: developer
70 
71   Note:
72   The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; it is
73   recommended they not be used in user codes unless you really gain something in their use.
74 
75 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
76           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()
77 
78 @*/
79 PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
80 {
81   PetscErrorCode ierr;
82 
83   PetscFunctionBegin;
84   if (!*map) PetscFunctionReturn(0);
85   if (!(*map)->refcnt--) {
86     ierr = PetscFree((*map)->range);CHKERRQ(ierr);
87     ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
88     ierr = PetscFree((*map));CHKERRQ(ierr);
89   }
90   *map = NULL;
91   PetscFunctionReturn(0);
92 }
93 
/* Derive rstart, rend, n and N for a layout whose range[] array was supplied
   directly (range[r] = first global index owned by rank r; range[size] = global
   size).  Called from PetscLayoutSetUp(); in debug builds it is collective on
   map->comm because of the consistency Allreduce. */
static PetscErrorCode PetscLayoutSetUp_SizesFromRanges_Private(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  /* this rank owns the half-open interval [range[rank], range[rank+1]) */
  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  map->n      = map->rend - map->rstart;
  map->N      = map->range[size];
#if defined(PETSC_USE_DEBUG)
  /* just check that n, N and bs are consistent */
  {
    PetscInt tmp;
    ierr = MPIU_Allreduce(&map->n,&tmp,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    if (tmp != map->N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D.\nThe provided PetscLayout is wrong.",tmp,map->N,map->n);
  }
  /* a block size > 1 must evenly divide both the local and the global size */
  if (map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }
#endif
  PetscFunctionReturn(0);
}
122 
123 /*@
124   PetscLayoutSetUp - given a map where you have set either the global or local
125                      size sets up the map so that it may be used.
126 
127   Collective on MPI_Comm
128 
129   Input Parameters:
130 . map - pointer to the map
131 
132   Level: developer
133 
134   Notes:
135     Typical calling sequence
136 $ PetscLayoutCreate(MPI_Comm,PetscLayout *);
137 $ PetscLayoutSetBlockSize(PetscLayout,1);
138 $ PetscLayoutSetSize(PetscLayout,N) or PetscLayoutSetLocalSize(PetscLayout,n); or both
139 $ PetscLayoutSetUp(PetscLayout);
140 $ PetscLayoutGetSize(PetscLayout,PetscInt *);
141 
142   If range exists, and local size is not set, everything gets computed from the range.
143 
144   If the local size, global size are already set and range exists then this does nothing.
145 
146 .seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutDestroy(),
147           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutCreate()
148 @*/
PetscErrorCode PetscLayoutSetUp(PetscLayout map)
{
  PetscMPIInt    rank,size;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* already fully set up: local size, global size and ownership ranges all present */
  if ((map->n >= 0) && (map->N >= 0) && (map->range)) PetscFunctionReturn(0);
  /* ranges were supplied but not the local size: derive n, N, rstart, rend from them */
  if (map->range && map->n < 0) {
    ierr = PetscLayoutSetUp_SizesFromRanges_Private(map);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* a block size > 1 must evenly divide whichever sizes the user supplied */
  if (map->n > 0 && map->bs > 1) {
    if (map->n % map->bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Local size %D must be divisible by blocksize %D",map->n,map->bs);
  }
  if (map->N > 0 && map->bs > 1) {
    if (map->N % map->bs) SETERRQ2(map->comm,PETSC_ERR_PLIB,"Global size %D must be divisible by blocksize %D",map->N,map->bs);
  }

  ierr = MPI_Comm_size(map->comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(map->comm, &rank);CHKERRQ(ierr);
  /* split ownership in units of blocks (an unset bs of -1 gives PetscAbs(bs) == 1) */
  if (map->n > 0) map->n = map->n/PetscAbs(map->bs);
  if (map->N > 0) map->N = map->N/PetscAbs(map->bs);
  /* fills in whichever of n/N is still unset (-1) so the local sizes sum to N; collective */
  ierr = PetscSplitOwnership(map->comm,&map->n,&map->N);CHKERRQ(ierr);
  /* convert back from block counts to entry counts */
  map->n = map->n*PetscAbs(map->bs);
  map->N = map->N*PetscAbs(map->bs);
  if (!map->range) {
    ierr = PetscMalloc1(size+1, &map->range);CHKERRQ(ierr);
  }
  /* gather every rank's local size into range[1..size], then prefix-sum so that
     range[r] = first global index owned by rank r and range[size] = N */
  ierr = MPI_Allgather(&map->n, 1, MPIU_INT, map->range+1, 1, MPIU_INT, map->comm);CHKERRQ(ierr);

  map->range[0] = 0;
  for (p = 2; p <= size; p++) map->range[p] += map->range[p-1];

  map->rstart = map->range[rank];
  map->rend   = map->range[rank+1];
  PetscFunctionReturn(0);
}
188 
189 /*@
190   PetscLayoutDuplicate - creates a new PetscLayout with the same information as a given one. If the PetscLayout already exists it is destroyed first.
191 
192   Collective on PetscLayout
193 
194   Input Parameter:
195 . in - input PetscLayout to be duplicated
196 
197   Output Parameter:
198 . out - the copy
199 
200   Level: developer
201 
202   Notes:
203     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
204 
205 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutReference()
206 @*/
207 PetscErrorCode PetscLayoutDuplicate(PetscLayout in,PetscLayout *out)
208 {
209   PetscMPIInt    size;
210   PetscErrorCode ierr;
211   MPI_Comm       comm = in->comm;
212 
213   PetscFunctionBegin;
214   ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
215   ierr = PetscLayoutCreate(comm,out);CHKERRQ(ierr);
216   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
217   ierr = PetscMemcpy(*out,in,sizeof(struct _n_PetscLayout));CHKERRQ(ierr);
218   ierr = PetscMalloc1(size+1,&(*out)->range);CHKERRQ(ierr);
219   ierr = PetscMemcpy((*out)->range,in->range,(size+1)*sizeof(PetscInt));CHKERRQ(ierr);
220 
221   (*out)->refcnt = 0;
222   PetscFunctionReturn(0);
223 }
224 
225 /*@
226   PetscLayoutReference - Causes a PETSc Vec or Mat to share a PetscLayout with one that already exists. Used by Vec/MatDuplicate_XXX()
227 
228   Collective on PetscLayout
229 
230   Input Parameter:
231 . in - input PetscLayout to be copied
232 
233   Output Parameter:
234 . out - the reference location
235 
236   Level: developer
237 
238   Notes:
239     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
240 
241   If the out location already contains a PetscLayout it is destroyed
242 
243 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
244 @*/
245 PetscErrorCode PetscLayoutReference(PetscLayout in,PetscLayout *out)
246 {
247   PetscErrorCode ierr;
248 
249   PetscFunctionBegin;
250   in->refcnt++;
251   ierr = PetscLayoutDestroy(out);CHKERRQ(ierr);
252   *out = in;
253   PetscFunctionReturn(0);
254 }
255 
256 /*@
257   PetscLayoutSetISLocalToGlobalMapping - sets a ISLocalGlobalMapping into a PetscLayout
258 
259   Collective on PetscLayout
260 
261   Input Parameter:
262 + in - input PetscLayout
263 - ltog - the local to global mapping
264 
265 
266   Level: developer
267 
268   Notes:
269     PetscLayoutSetUp() does not need to be called on the resulting PetscLayout
270 
271   If the ltog location already contains a PetscLayout it is destroyed
272 
273 .seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
274 @*/
275 PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
276 {
277   PetscErrorCode ierr;
278   PetscInt       bs;
279 
280   PetscFunctionBegin;
281   ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
282   if (in->bs > 0 && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D",in->bs,bs);
283   ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
284   ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
285   in->mapping = ltog;
286   PetscFunctionReturn(0);
287 }
288 
289 /*@
290   PetscLayoutSetLocalSize - Sets the local size for a PetscLayout object.
291 
292   Collective on PetscLayout
293 
294   Input Parameters:
295 + map - pointer to the map
296 - n - the local size
297 
298   Level: developer
299 
300   Notes:
301   Call this after the call to PetscLayoutCreate()
302 
303 .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
304           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
305 @*/
306 PetscErrorCode PetscLayoutSetLocalSize(PetscLayout map,PetscInt n)
307 {
308   PetscFunctionBegin;
309   if (map->bs > 1 && n % map->bs) SETERRQ2(map->comm,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",n,map->bs);
310   map->n = n;
311   PetscFunctionReturn(0);
312 }
313 
314 /*@C
315      PetscLayoutGetLocalSize - Gets the local size for a PetscLayout object.
316 
317     Not Collective
318 
319    Input Parameters:
320 .    map - pointer to the map
321 
322    Output Parameters:
323 .    n - the local size
324 
325    Level: developer
326 
327     Notes:
328        Call this after the call to PetscLayoutSetUp()
329 
330     Fortran Notes:
331       Not available from Fortran
332 
333 .seealso: PetscLayoutCreate(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayoutSetUp()
334           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
335 
336 @*/
337 PetscErrorCode  PetscLayoutGetLocalSize(PetscLayout map,PetscInt *n)
338 {
339   PetscFunctionBegin;
340   *n = map->n;
341   PetscFunctionReturn(0);
342 }
343 
344 /*@
345   PetscLayoutSetSize - Sets the global size for a PetscLayout object.
346 
347   Logically Collective on PetscLayout
348 
349   Input Parameters:
350 + map - pointer to the map
351 - n - the global size
352 
353   Level: developer
354 
355   Notes:
356   Call this after the call to PetscLayoutCreate()
357 
358 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
359           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
360 @*/
361 PetscErrorCode PetscLayoutSetSize(PetscLayout map,PetscInt n)
362 {
363   PetscFunctionBegin;
364   map->N = n;
365   PetscFunctionReturn(0);
366 }
367 
368 /*@
369   PetscLayoutGetSize - Gets the global size for a PetscLayout object.
370 
371   Not Collective
372 
373   Input Parameters:
374 . map - pointer to the map
375 
376   Output Parameters:
377 . n - the global size
378 
379   Level: developer
380 
381   Notes:
382   Call this after the call to PetscLayoutSetUp()
383 
384 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
385           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize()
386 @*/
387 PetscErrorCode PetscLayoutGetSize(PetscLayout map,PetscInt *n)
388 {
389   PetscFunctionBegin;
390   *n = map->N;
391   PetscFunctionReturn(0);
392 }
393 
394 /*@
395   PetscLayoutSetBlockSize - Sets the block size for a PetscLayout object.
396 
397   Logically Collective on PetscLayout
398 
399   Input Parameters:
400 + map - pointer to the map
401 - bs - the size
402 
403   Level: developer
404 
405   Notes:
406   Call this after the call to PetscLayoutCreate()
407 
408 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
409           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
410 @*/
411 PetscErrorCode PetscLayoutSetBlockSize(PetscLayout map,PetscInt bs)
412 {
413   PetscFunctionBegin;
414   if (bs < 0) PetscFunctionReturn(0);
415   if (map->n > 0 && map->n % bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size %D not compatible with block size %D",map->n,bs);
416   if (map->mapping) {
417     PetscInt       obs;
418     PetscErrorCode ierr;
419 
420     ierr = ISLocalToGlobalMappingGetBlockSize(map->mapping,&obs);CHKERRQ(ierr);
421     if (obs > 1) {
422       ierr = ISLocalToGlobalMappingSetBlockSize(map->mapping,bs);CHKERRQ(ierr);
423     }
424   }
425   map->bs = bs;
426   PetscFunctionReturn(0);
427 }
428 
429 /*@
430   PetscLayoutGetBlockSize - Gets the block size for a PetscLayout object.
431 
432   Not Collective
433 
434   Input Parameters:
435 . map - pointer to the map
436 
437   Output Parameters:
438 . bs - the size
439 
440   Level: developer
441 
442   Notes:
443   Call this after the call to PetscLayoutSetUp()
444 
445 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(), PetscLayoutSetUp()
446           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize()
447 @*/
448 PetscErrorCode PetscLayoutGetBlockSize(PetscLayout map,PetscInt *bs)
449 {
450   PetscFunctionBegin;
451   *bs = PetscAbs(map->bs);
452   PetscFunctionReturn(0);
453 }
454 
455 /*@
456   PetscLayoutGetRange - gets the range of values owned by this process
457 
458   Not Collective
459 
460   Input Parameters:
461 . map - pointer to the map
462 
463   Output Parameters:
464 + rstart - first index owned by this process
465 - rend   - one more than the last index owned by this process
466 
467   Level: developer
468 
469   Notes:
470   Call this after the call to PetscLayoutSetUp()
471 
472 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
473           PetscLayoutGetSize(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
474 @*/
475 PetscErrorCode PetscLayoutGetRange(PetscLayout map,PetscInt *rstart,PetscInt *rend)
476 {
477   PetscFunctionBegin;
478   if (rstart) *rstart = map->rstart;
479   if (rend)   *rend   = map->rend;
480   PetscFunctionReturn(0);
481 }
482 
483 /*@C
484      PetscLayoutGetRanges - gets the range of values owned by all processes
485 
486     Not Collective
487 
488    Input Parameters:
489 .    map - pointer to the map
490 
491    Output Parameters:
492 .    range - start of each processors range of indices (the final entry is one more then the
493              last index on the last process)
494 
495    Level: developer
496 
497     Notes:
498        Call this after the call to PetscLayoutSetUp()
499 
500     Fortran Notes:
501       Not available from Fortran
502 
503 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutSetSize(),
504           PetscLayoutGetSize(), PetscLayoutGetRange(), PetscLayoutSetBlockSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
505 
506 @*/
507 PetscErrorCode  PetscLayoutGetRanges(PetscLayout map,const PetscInt *range[])
508 {
509   PetscFunctionBegin;
510   *range = map->range;
511   PetscFunctionReturn(0);
512 }
513 
514 /*@C
515    PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout
516 
517    Collective
518 
519    Input Arguments:
520 +  sf - star forest
521 .  layout - PetscLayout defining the global space
522 .  nleaves - number of leaf vertices on the current process, each of these references a root on any process
523 .  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
524 .  localmode - copy mode for ilocal
525 -  iremote - remote locations of root vertices for each leaf on the current process
526 
527    Level: intermediate
528 
529    Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
530    encode contiguous storage. In such case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
531    needed
532 
533 .seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
534 @*/
535 PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
536 {
537   PetscErrorCode ierr;
538   PetscInt       i,nroots;
539   PetscSFNode    *remote;
540 
541   PetscFunctionBegin;
542   ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
543   ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
544   for (i=0; i<nleaves; i++) {
545     PetscInt owner = -1;
546     ierr = PetscLayoutFindOwner(layout,iremote[i],&owner);CHKERRQ(ierr);
547     remote[i].rank  = owner;
548     remote[i].index = iremote[i] - layout->range[owner];
549   }
550   ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
551   PetscFunctionReturn(0);
552 }
553 
554 /*@
555   PetscLayoutCompare - Compares two layouts
556 
557   Not Collective
558 
559   Input Parameters:
560 + mapa - pointer to the first map
561 - mapb - pointer to the second map
562 
563   Output Parameters:
564 . congruent - PETSC_TRUE if the two layouts are congruent, PETSC_FALSE otherwise
565 
566   Level: beginner
567 
568   Notes:
569 
570 .seealso: PetscLayoutCreate(), PetscLayoutSetLocalSize(), PetscLayoutGetLocalSize(), PetscLayoutGetBlockSize(),
571           PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutSetUp()
572 @*/
573 PetscErrorCode PetscLayoutCompare(PetscLayout mapa,PetscLayout mapb,PetscBool *congruent)
574 {
575   PetscErrorCode ierr;
576   PetscMPIInt    sizea,sizeb;
577 
578   PetscFunctionBegin;
579   *congruent = PETSC_FALSE;
580   ierr = MPI_Comm_size(mapa->comm,&sizea);CHKERRQ(ierr);
581   ierr = MPI_Comm_size(mapb->comm,&sizeb);CHKERRQ(ierr);
582   if (mapa->N == mapb->N && mapa->range && mapb->range && sizea == sizeb) {
583     ierr = PetscMemcmp(mapa->range,mapb->range,(sizea+1)*sizeof(PetscInt),congruent);CHKERRQ(ierr);
584   }
585   PetscFunctionReturn(0);
586 }
587 
/* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
/*
   PetscLayoutMapLocal - given N global indices (possibly owned by other processes
   of the layout), determine which locally owned entries were referenced by any
   process.  Collective on map->comm; map must be set up (range and n are read).

   Input:
     map  - the layout
     N    - number of entries of idxs on this process
     idxs - global indices, each must lie in [0,map->N)

   Output (each optional; pass NULL to skip):
     on     - number of locally owned entries referenced by some process
     oidxs  - newly allocated array of those local positions (caller frees)
     ogidxs - newly allocated array assigning each selected entry a new contiguous
              global number (caller frees); only computed when requested
*/
PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
  /* Create SF where leaves are input idxs and roots are owned idxs */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks a root (locally owned entry) that no process referenced */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  /* the SF takes ownership of ridxs (and the NULL ilocal means contiguous leaves) */
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* NOTE(review): roots targeted by at least one leaf end up nonnegative after the
     logical-OR reduction, while untargeted roots keep -1 — confirm against
     PetscSFReduce semantics */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* number the selected leaf entries contiguously across processes via a prefix scan */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum; /* exclusive prefix: first new global number on this process */
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    /* push each leaf's new number to its owning root */
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r];
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
646