#include <petsc/private/sfimpl.h> /*I "petscsf.h" I*/
#include <petscctable.h>

#if defined(PETSC_USE_DEBUG)
#  define PetscSFCheckGraphSet(sf,arg) do {                          \
    if (PetscUnlikely(!(sf)->graphset))                              \
      SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetGraph() on argument %D \"%s\" before %s()",(arg),#sf,PETSC_FUNCTION_NAME); \
  } while (0)
#else
#  define PetscSFCheckGraphSet(sf,arg) do {} while (0)
#endif

const char *const PetscSFDuplicateOptions[] = {"CONFONLY","RANKS","GRAPH","PetscSFDuplicateOption","PETSCSF_DUPLICATE_",0};

/*@
   PetscSFCreate - create a star forest communication context

   Collective on MPI_Comm

   Input Arguments:
.  comm - communicator on which the star forest will operate

   Output Arguments:
.  sf - new star forest context

   Level: intermediate

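   Example Usage:
   A minimal serial sketch (assumptions for illustration: two roots and two leaves
   on the same process, leaves stored contiguously so ilocal is NULL):
.vb
   PetscSF        sf;
   PetscSFNode    remote[2];
   PetscErrorCode ierr;

   ierr = PetscSFCreate(PETSC_COMM_SELF,&sf);CHKERRQ(ierr);
   remote[0].rank = 0; remote[0].index = 1; /* leaf 0 references root 1 */
   remote[1].rank = 0; remote[1].index = 0; /* leaf 1 references root 0 */
   ierr = PetscSFSetGraph(sf,2,2,NULL,PETSC_COPY_VALUES,remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
   ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
.ve
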
.seealso: PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFCreate(MPI_Comm comm,PetscSF *sf)
{
  PetscErrorCode ierr;
  PetscSF        b;

  PetscFunctionBegin;
  PetscValidPointer(sf,2);
  ierr = PetscSFInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(b,PETSCSF_CLASSID,"PetscSF","Star Forest","PetscSF",comm,PetscSFDestroy,PetscSFView);CHKERRQ(ierr);

  b->nroots    = -1;
  b->nleaves   = -1;
  b->minleaf   = PETSC_MAX_INT;
  b->maxleaf   = PETSC_MIN_INT;
  b->nranks    = -1;
  b->rankorder = PETSC_TRUE;
  b->ingroup   = MPI_GROUP_NULL;
  b->outgroup  = MPI_GROUP_NULL;
  b->graphset  = PETSC_FALSE;

  *sf = b;
  PetscFunctionReturn(0);
}

/*@
   PetscSFReset - Reset a star forest so that different sizes or neighbors can be used

   Collective

   Input Arguments:
.  sf - star forest

   Level: advanced

.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFDestroy()
@*/
PetscErrorCode PetscSFReset(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (sf->ops->Reset) {ierr = (*sf->ops->Reset)(sf);CHKERRQ(ierr);}
  sf->nroots   = -1;
  sf->nleaves  = -1;
  sf->minleaf  = PETSC_MAX_INT;
  sf->maxleaf  = PETSC_MIN_INT;
  sf->mine     = NULL;
  sf->remote   = NULL;
  sf->graphset = PETSC_FALSE;
  ierr = PetscFree(sf->mine_alloc);CHKERRQ(ierr);
  ierr = PetscFree(sf->remote_alloc);CHKERRQ(ierr);
  sf->nranks = -1;
  ierr = PetscFree4(sf->ranks,sf->roffset,sf->rmine,sf->rremote);CHKERRQ(ierr);
  sf->degreeknown = PETSC_FALSE;
  ierr = PetscFree(sf->degree);CHKERRQ(ierr);
  if (sf->ingroup  != MPI_GROUP_NULL) {ierr = MPI_Group_free(&sf->ingroup);CHKERRQ(ierr);}
  if (sf->outgroup != MPI_GROUP_NULL) {ierr = MPI_Group_free(&sf->outgroup);CHKERRQ(ierr);}
  ierr = PetscSFDestroy(&sf->multi);CHKERRQ(ierr);
  sf->setupcalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/*@C
   PetscSFSetType - Set the PetscSF communication implementation

   Collective on PetscSF

   Input Parameters:
+  sf - the PetscSF context
-  type - a known method

   Options Database Key:
.  -sf_type <type> - Sets the method; use -help for a list
   of available methods (for instance, window, pt2pt, neighbor)

   Notes:
   See "include/petscsf.h" for available methods (for instance)
+    PETSCSFWINDOW - MPI-2/3 one-sided
-    PETSCSFBASIC - basic implementation using MPI-1 two-sided

  Level: intermediate

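  Example Usage:
  A sketch of selecting the one-sided implementation programmatically (the same
  choice can be made at run time with -sf_type window):
.vb
  ierr = PetscSFSetType(sf,PETSCSFWINDOW);CHKERRQ(ierr);
.ve
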
.keywords: PetscSF, set, type

.seealso: PetscSFType, PetscSFCreate()
@*/
PetscErrorCode PetscSFSetType(PetscSF sf,PetscSFType type)
{
  PetscErrorCode ierr,(*r)(PetscSF);
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidCharPointer(type,2);

  ierr = PetscObjectTypeCompare((PetscObject)sf,type,&match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = PetscFunctionListFind(PetscSFList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested PetscSF type %s",type);
  /* Destroy the previous PetscSF implementation context */
  if (sf->ops->Destroy) {ierr = (*(sf)->ops->Destroy)(sf);CHKERRQ(ierr);}
  ierr = PetscMemzero(sf->ops,sizeof(*sf->ops));CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)sf,type);CHKERRQ(ierr);
  ierr = (*r)(sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  PetscSFGetType - Get the PetscSF communication implementation

  Not Collective

  Input Parameter:
. sf  - the PetscSF context

  Output Parameter:
. type - the PetscSF type name

  Level: intermediate

.keywords: PetscSF, get, type
.seealso: PetscSFSetType(), PetscSFCreate()
@*/
PetscErrorCode PetscSFGetType(PetscSF sf, PetscSFType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID,1);
  PetscValidPointer(type,2);
  *type = ((PetscObject)sf)->type_name;
  PetscFunctionReturn(0);
}

/*@
   PetscSFDestroy - destroy star forest

   Collective

   Input Arguments:
.  sf - address of star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFReset()
@*/
PetscErrorCode PetscSFDestroy(PetscSF *sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*sf) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*sf),PETSCSF_CLASSID,1);
  if (--((PetscObject)(*sf))->refct > 0) {*sf = NULL; PetscFunctionReturn(0);}
  ierr = PetscSFReset(*sf);CHKERRQ(ierr);
  if ((*sf)->ops->Destroy) {ierr = (*(*sf)->ops->Destroy)(*sf);CHKERRQ(ierr);}
  ierr = PetscHeaderDestroy(sf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PetscSFSetUp - set up communication structures

   Collective

   Input Arguments:
.  sf - star forest communication object

   Level: beginner

.seealso: PetscSFSetFromOptions(), PetscSFSetType()
@*/
PetscErrorCode PetscSFSetUp(PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  if (sf->setupcalled) PetscFunctionReturn(0);
  if (!((PetscObject)sf)->type_name) {ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);}
  ierr = PetscLogEventBegin(PETSCSF_SetUp,sf,0,0,0);CHKERRQ(ierr);
  if (sf->ops->SetUp) {ierr = (*sf->ops->SetUp)(sf);CHKERRQ(ierr);}
  ierr = PetscLogEventEnd(PETSCSF_SetUp,sf,0,0,0);CHKERRQ(ierr);
  sf->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   PetscSFSetFromOptions - set PetscSF options using the options database

   Logically Collective

   Input Arguments:
.  sf - star forest

   Options Database Keys:
+  -sf_type - implementation type, see PetscSFSetType()
-  -sf_rank_order - sort composite points for gathers and scatters in rank order; otherwise gathers are non-deterministic

   Level: intermediate

.keywords: PetscSF, set, from, options, database

.seealso: PetscSFWindowSetSyncType()
@*/
PetscErrorCode PetscSFSetFromOptions(PetscSF sf)
{
  PetscSFType    deft;
  char           type[256];
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)sf);CHKERRQ(ierr);
  deft = ((PetscObject)sf)->type_name ? ((PetscObject)sf)->type_name : PETSCSFBASIC;
  ierr = PetscOptionsFList("-sf_type","PetscSF implementation type","PetscSFSetType",PetscSFList,deft,type,sizeof(type),&flg);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,flg ? type : deft);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-sf_rank_order","sort composite points for gathers and scatters in rank order, gathers are non-deterministic otherwise","PetscSFSetRankOrder",sf->rankorder,&sf->rankorder,NULL);CHKERRQ(ierr);
  if (sf->ops->SetFromOptions) {ierr = (*sf->ops->SetFromOptions)(PetscOptionsObject,sf);CHKERRQ(ierr);}
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PetscSFSetRankOrder - sort multi-points for gathers and scatters by rank order

   Logically Collective

   Input Arguments:
+  sf - star forest
-  flg - PETSC_TRUE to sort, PETSC_FALSE to skip sorting (lower setup cost, but non-deterministic)

   Level: advanced

.seealso: PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFSetRankOrder(PetscSF sf,PetscBool flg)
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidLogicalCollectiveBool(sf,flg,2);
  if (sf->multi) SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_WRONGSTATE,"Rank ordering must be set before first call to PetscSFGatherBegin() or PetscSFScatterBegin()");
  sf->rankorder = flg;
  PetscFunctionReturn(0);
}

/*@
   PetscSFSetGraph - Set a parallel star forest

   Collective

   Input Arguments:
+  sf - star forest
.  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
.  iremote - remote locations of root vertices for each leaf on the current process
-  remotemode - copy mode for iremote

   Level: intermediate

   Notes:
    In Fortran you must use PETSC_COPY_VALUES for localmode and remotemode

   Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
   encode contiguous storage. In that case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
   needed.

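   Example Usage:
   A minimal sketch of a ring exchange (assumptions for illustration: sf lives on
   communicator comm, each process owns one root, and its single leaf references
   the root on the next process):
.vb
   PetscMPIInt rank,size;
   PetscSFNode remote;

   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
   remote.rank  = (rank+1)%size; /* leaf references the single root on the next process */
   remote.index = 0;
   ierr = PetscSFSetGraph(sf,1,1,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
.ve
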
.seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (nleaves > 0 && ilocal) PetscValidIntPointer(ilocal,4);
  if (nleaves > 0) PetscValidPointer(iremote,6);
  if (nroots  < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nroots %D, cannot be negative",nroots);
  if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);

  ierr = PetscSFReset(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);

  sf->nroots  = nroots;
  sf->nleaves = nleaves;

  if (nleaves && ilocal) {
    PetscInt i;
    PetscInt minleaf = PETSC_MAX_INT;
    PetscInt maxleaf = PETSC_MIN_INT;
    int      contiguous = 1;
    for (i=0; i<nleaves; i++) {
      minleaf = PetscMin(minleaf,ilocal[i]);
      maxleaf = PetscMax(maxleaf,ilocal[i]);
      contiguous &= (ilocal[i] == i);
    }
    sf->minleaf = minleaf;
    sf->maxleaf = maxleaf;
    if (contiguous) {
      if (localmode == PETSC_OWN_POINTER) {
        ierr = PetscFree(ilocal);CHKERRQ(ierr);
      }
      ilocal = NULL;
    }
  } else {
    sf->minleaf = 0;
    sf->maxleaf = nleaves - 1;
  }

  if (ilocal) {
    switch (localmode) {
    case PETSC_COPY_VALUES:
      ierr = PetscMalloc1(nleaves,&sf->mine_alloc);CHKERRQ(ierr);
      ierr = PetscMemcpy(sf->mine_alloc,ilocal,nleaves*sizeof(*ilocal));CHKERRQ(ierr);
      sf->mine = sf->mine_alloc;
      break;
    case PETSC_OWN_POINTER:
      sf->mine_alloc = (PetscInt*)ilocal;
      sf->mine       = sf->mine_alloc;
      break;
    case PETSC_USE_POINTER:
      sf->mine_alloc = NULL;
      sf->mine       = (PetscInt*)ilocal;
      break;
    default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
    }
  }

  switch (remotemode) {
  case PETSC_COPY_VALUES:
    ierr = PetscMalloc1(nleaves,&sf->remote_alloc);CHKERRQ(ierr);
    ierr = PetscMemcpy(sf->remote_alloc,iremote,nleaves*sizeof(*iremote));CHKERRQ(ierr);
    sf->remote = sf->remote_alloc;
    break;
  case PETSC_OWN_POINTER:
    sf->remote_alloc = (PetscSFNode*)iremote;
    sf->remote       = sf->remote_alloc;
    break;
  case PETSC_USE_POINTER:
    sf->remote_alloc = NULL;
    sf->remote       = (PetscSFNode*)iremote;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
  }

  ierr = PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  sf->graphset = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   PetscSFCreateInverseSF - given a PetscSF in which all vertices have degree 1, creates the inverse map

   Collective

   Input Arguments:
.  sf - star forest to invert

   Output Arguments:
.  isf - inverse of sf

   Level: advanced

   Notes:
   All roots must have degree 1.

   The local space may be a permutation, but cannot be sparse.

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFCreateInverseSF(PetscSF sf,PetscSF *isf)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,nroots,nleaves,maxlocal,count,*newilocal;
  const PetscInt *ilocal;
  PetscSFNode    *roots,*leaves;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  PetscValidPointer(isf,2);

  ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,NULL);CHKERRQ(ierr);
  maxlocal = sf->maxleaf+1; /* TODO: We should use PetscSFGetLeafRange() */

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
  ierr = PetscMalloc2(nroots,&roots,maxlocal,&leaves);CHKERRQ(ierr);
  for (i=0; i<maxlocal; i++) {
    leaves[i].rank  = rank;
    leaves[i].index = i;
  }
  for (i=0; i <nroots; i++) {
    roots[i].rank  = -1;
    roots[i].index = -1;
  }
  ierr = PetscSFReduceBegin(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_2INT,leaves,roots,MPIU_REPLACE);CHKERRQ(ierr);

  /* Check whether our leaves are sparse */
  for (i=0,count=0; i<nroots; i++) if (roots[i].rank >= 0) count++;
  if (count == nroots) newilocal = NULL;
  else {                        /* Index for sparse leaves and compact "roots" array (which is to become our leaves). */
    ierr = PetscMalloc1(count,&newilocal);CHKERRQ(ierr);
    for (i=0,count=0; i<nroots; i++) {
      if (roots[i].rank >= 0) {
        newilocal[count]   = i;
        roots[count].rank  = roots[i].rank;
        roots[count].index = roots[i].index;
        count++;
      }
    }
  }

  ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,isf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*isf,maxlocal,count,newilocal,PETSC_OWN_POINTER,roots,PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree2(roots,leaves);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PetscSFDuplicate - duplicate a PetscSF, optionally preserving rank connectivity and graph

   Collective

   Input Arguments:
+  sf - communication object to duplicate
-  opt - PETSCSF_DUPLICATE_CONFONLY, PETSCSF_DUPLICATE_RANKS, or PETSCSF_DUPLICATE_GRAPH (see PetscSFDuplicateOption)

   Output Arguments:
.  newsf - new communication object

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetType(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFDuplicate(PetscSF sf,PetscSFDuplicateOption opt,PetscSF *newsf)
{
  PetscSFType    type;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidLogicalCollectiveEnum(sf,opt,2);
  PetscValidPointer(newsf,3);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)sf),newsf);CHKERRQ(ierr);
  ierr = PetscSFGetType(sf,&type);CHKERRQ(ierr);
  if (type) {ierr = PetscSFSetType(*newsf,type);CHKERRQ(ierr);}
  if (opt == PETSCSF_DUPLICATE_GRAPH) {
    PetscInt          nroots,nleaves;
    const PetscInt    *ilocal;
    const PetscSFNode *iremote;
    PetscSFCheckGraphSet(sf,1);
    ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(*newsf,nroots,nleaves,ilocal,PETSC_COPY_VALUES,iremote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  }
  if (sf->ops->Duplicate) {ierr = (*sf->ops->Duplicate)(sf,opt,*newsf);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@C
   PetscSFGetGraph - Get the graph specifying a parallel star forest

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers
-  iremote - remote locations of root vertices for each leaf on the current process

   Notes:
   We are not currently requiring that the graph be set, thus returning nroots=-1 if it has not been set yet

   Level: intermediate

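   Example Usage:
   A minimal sketch of traversing the local edges (assumption for illustration:
   the loop body is application code):
.vb
   PetscInt          nroots,nleaves,i;
   const PetscInt    *ilocal;
   const PetscSFNode *iremote;

   ierr = PetscSFGetGraph(sf,&nroots,&nleaves,&ilocal,&iremote);CHKERRQ(ierr);
   for (i=0; i<nleaves; i++) {
     PetscInt leaf = ilocal ? ilocal[i] : i; /* NULL ilocal means contiguous leaves */
     /* leaf in local buffers references (iremote[i].rank, iremote[i].index) */
   }
.ve
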
.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetGraph(PetscSF sf,PetscInt *nroots,PetscInt *nleaves,const PetscInt **ilocal,const PetscSFNode **iremote)
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (nroots) *nroots = sf->nroots;
  if (nleaves) *nleaves = sf->nleaves;
  if (ilocal) *ilocal = sf->mine;
  if (iremote) *iremote = sf->remote;
  PetscFunctionReturn(0);
}

/*@
   PetscSFGetLeafRange - Get the active leaf ranges

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  minleaf - minimum active leaf on this process
-  maxleaf - maximum active leaf on this process

   Level: developer

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFGetLeafRange(PetscSF sf,PetscInt *minleaf,PetscInt *maxleaf)
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  if (minleaf) *minleaf = sf->minleaf;
  if (maxleaf) *maxleaf = sf->maxleaf;
  PetscFunctionReturn(0);
}

/*@C
   PetscSFView - view a star forest

   Collective

   Input Arguments:
+  sf - star forest
-  viewer - viewer to display graph, for example PETSC_VIEWER_STDOUT_WORLD

   Level: beginner

.seealso: PetscSFCreate(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFView(PetscSF sf,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (!viewer) {ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)sf),&viewer);CHKERRQ(ierr);}
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(sf,1,viewer,2);
  if (sf->graphset) {ierr = PetscSFSetUp(sf);CHKERRQ(ierr);}
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    PetscMPIInt rank;
    PetscInt    ii,i,j;

    ierr = PetscObjectPrintClassNamePrefixType((PetscObject)sf,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    if (sf->ops->View) {ierr = (*sf->ops->View)(sf,viewer);CHKERRQ(ierr);}
    if (!sf->graphset) {
      ierr = PetscViewerASCIIPrintf(viewer,"PetscSFSetGraph() has not been called yet\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Number of roots=%D, leaves=%D, remote ranks=%D\n",rank,sf->nroots,sf->nleaves,sf->nranks);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %D <- (%D,%D)\n",rank,sf->mine ? sf->mine[i] : i,sf->remote[i].rank,sf->remote[i].index);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      PetscMPIInt *tmpranks,*perm;
      ierr = PetscMalloc2(sf->nranks,&tmpranks,sf->nranks,&perm);CHKERRQ(ierr);
      ierr = PetscMemcpy(tmpranks,sf->ranks,sf->nranks*sizeof(tmpranks[0]));CHKERRQ(ierr);
      for (i=0; i<sf->nranks; i++) perm[i] = i;
      ierr = PetscSortMPIIntWithArray(sf->nranks,tmpranks,perm);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Roots referenced by my leaves, by rank\n",rank);CHKERRQ(ierr);
      for (ii=0; ii<sf->nranks; ii++) {
        i = perm[ii];
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] %d: %D edges\n",rank,sf->ranks[i],sf->roffset[i+1]-sf->roffset[i]);CHKERRQ(ierr);
        for (j=sf->roffset[i]; j<sf->roffset[i+1]; j++) {
          ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d]    %D <- %D\n",rank,sf->rmine[j],sf->rremote[j]);CHKERRQ(ierr);
        }
      }
      ierr = PetscFree2(tmpranks,perm);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@C
   PetscSFGetRanks - Get ranks and number of vertices referenced by leaves on this process

   Not Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
+  nranks - number of ranks referenced by local part
.  ranks - array of ranks
.  roffset - offset in rmine/rremote for each rank (length nranks+1)
.  rmine - concatenated array holding local indices referencing each remote rank
-  rremote - concatenated array holding remote indices referenced for each remote rank

   Level: developer

.seealso: PetscSFSetGraph()
@*/
PetscErrorCode PetscSFGetRanks(PetscSF sf,PetscInt *nranks,const PetscMPIInt **ranks,const PetscInt **roffset,const PetscInt **rmine,const PetscInt **rremote)
{

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  if (!sf->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetUp() before obtaining ranks");
  if (nranks)  *nranks  = sf->nranks;
  if (ranks)   *ranks   = sf->ranks;
  if (roffset) *roffset = sf->roffset;
  if (rmine)   *rmine   = sf->rmine;
  if (rremote) *rremote = sf->rremote;
  PetscFunctionReturn(0);
}

static PetscBool InList(PetscMPIInt needle,PetscMPIInt n,const PetscMPIInt *list) {
  PetscInt i;
  for (i=0; i<n; i++) {
    if (needle == list[i]) return PETSC_TRUE;
  }
  return PETSC_FALSE;
}

/*@C
   PetscSFSetUpRanks - Set up data structures associated with ranks; this is for internal use by PetscSF implementations.

   Collective

   Input Arguments:
+  sf - PetscSF to set up; PetscSFSetGraph() must have been called
-  dgroup - MPI_Group of ranks to be distinguished (e.g., for self or shared memory exchange)

   Level: developer

.seealso: PetscSFGetRanks()
@*/
PetscErrorCode PetscSFSetUpRanks(PetscSF sf,MPI_Group dgroup)
{
  PetscErrorCode     ierr;
  PetscTable         table;
  PetscTablePosition pos;
  PetscMPIInt        size,groupsize,*groupranks;
  PetscInt           i,*rcount,*ranks;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);CHKERRQ(ierr);
  ierr = PetscTableCreate(10,size,&table);CHKERRQ(ierr);
  for (i=0; i<sf->nleaves; i++) {
    /* Log 1-based rank */
    ierr = PetscTableAdd(table,sf->remote[i].rank+1,1,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(table,&sf->nranks);CHKERRQ(ierr);
  ierr = PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,sf->nleaves,&sf->rmine,sf->nleaves,&sf->rremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);CHKERRQ(ierr);
  ierr = PetscTableGetHeadPosition(table,&pos);CHKERRQ(ierr);
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);CHKERRQ(ierr);
    ranks[i]--;             /* Convert back to 0-based */
  }
  ierr = PetscTableDestroy(&table);CHKERRQ(ierr);

  /* We expect that dgroup is reliably "small" while nranks could be large */
  {
    MPI_Group group = MPI_GROUP_NULL;
    PetscMPIInt *dgroupranks;
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_size(dgroup,&groupsize);CHKERRQ(ierr);
    ierr = PetscMalloc1(groupsize,&dgroupranks);CHKERRQ(ierr);
    ierr = PetscMalloc1(groupsize,&groupranks);CHKERRQ(ierr);
    for (i=0; i<groupsize; i++) dgroupranks[i] = i;
    if (groupsize) {ierr = MPI_Group_translate_ranks(dgroup,groupsize,dgroupranks,group,groupranks);CHKERRQ(ierr);}
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
    ierr = PetscFree(dgroupranks);CHKERRQ(ierr);
  }

  /* Partition ranks[] into distinguished (first sf->ndranks) followed by non-distinguished */
  for (sf->ndranks=0,i=sf->nranks; sf->ndranks<i; ) {
    for (i--; sf->ndranks<i; i--) { /* Scan i backward looking for distinguished rank */
      if (InList(ranks[i],groupsize,groupranks)) break;
    }
    for ( ; sf->ndranks<=i; sf->ndranks++) { /* Scan sf->ndranks forward looking for non-distinguished rank */
      if (!InList(ranks[sf->ndranks],groupsize,groupranks)) break;
    }
    if (sf->ndranks < i) {                         /* Swap ranks[sf->ndranks] with ranks[i] */
      PetscInt    tmprank,tmpcount;
      tmprank = ranks[i];
      tmpcount = rcount[i];
      ranks[i] = ranks[sf->ndranks];
      rcount[i] = rcount[sf->ndranks];
      ranks[sf->ndranks] = tmprank;
      rcount[sf->ndranks] = tmpcount;
      sf->ndranks++;
    }
  }
  ierr = PetscFree(groupranks);CHKERRQ(ierr);
  ierr = PetscSortIntWithArray(sf->ndranks,ranks,rcount);CHKERRQ(ierr);
  ierr = PetscSortIntWithArray(sf->nranks-sf->ndranks,ranks+sf->ndranks,rcount+sf->ndranks);CHKERRQ(ierr);
  sf->roffset[0] = 0;
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscMPIIntCast(ranks[i],sf->ranks+i);CHKERRQ(ierr);
    sf->roffset[i+1] = sf->roffset[i] + rcount[i];
    rcount[i]        = 0;
  }
  for (i=0; i<sf->nleaves; i++) {
    PetscInt irank;
    /* Search for index of iremote[i].rank in sf->ranks */
    ierr = PetscFindMPIInt(sf->remote[i].rank,sf->ndranks,sf->ranks,&irank);CHKERRQ(ierr);
    if (irank < 0) {
      ierr = PetscFindMPIInt(sf->remote[i].rank,sf->nranks-sf->ndranks,sf->ranks+sf->ndranks,&irank);CHKERRQ(ierr);
      if (irank >= 0) irank += sf->ndranks;
    }
    if (irank < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",sf->remote[i].rank);
    sf->rmine[sf->roffset[irank] + rcount[irank]]   = sf->mine ? sf->mine[i] : i;
    sf->rremote[sf->roffset[irank] + rcount[irank]] = sf->remote[i].index;
    rcount[irank]++;
  }
  ierr = PetscFree2(rcount,ranks);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFGetGroups - gets incoming and outgoing process groups

   Collective

   Input Argument:
.  sf - star forest

   Output Arguments:
+  incoming - group of origin processes for incoming edges (leaves that reference my roots)
-  outgoing - group of destination processes for outgoing edges (roots that I reference)

   Level: developer

.seealso: PetscSFGetWindow(), PetscSFRestoreWindow()
@*/
PetscErrorCode PetscSFGetGroups(PetscSF sf,MPI_Group *incoming,MPI_Group *outgoing)
{
  PetscErrorCode ierr;
  MPI_Group      group = MPI_GROUP_NULL;

  PetscFunctionBegin;
  if (!sf->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must call PetscSFSetUp() before obtaining groups");
  if (sf->ingroup == MPI_GROUP_NULL) {
    PetscInt       i;
    const PetscInt *indegree;
    PetscMPIInt    rank,*outranks,*inranks;
    PetscSFNode    *remote;
    PetscSF        bgcount;

    /* Compute the number of incoming ranks */
    ierr = PetscMalloc1(sf->nranks,&remote);CHKERRQ(ierr);
    for (i=0; i<sf->nranks; i++) {
      remote[i].rank  = sf->ranks[i];
      remote[i].index = 0;
    }
    ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_CONFONLY,&bgcount);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(bgcount,1,sf->nranks,NULL,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeBegin(bgcount,&indegree);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeEnd(bgcount,&indegree);CHKERRQ(ierr);

    /* Enumerate the incoming ranks */
    ierr = PetscMalloc2(indegree[0],&inranks,sf->nranks,&outranks);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
    for (i=0; i<sf->nranks; i++) outranks[i] = rank;
    ierr = PetscSFGatherBegin(bgcount,MPI_INT,outranks,inranks);CHKERRQ(ierr);
    ierr = PetscSFGatherEnd(bgcount,MPI_INT,outranks,inranks);CHKERRQ(ierr);
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_incl(group,indegree[0],inranks,&sf->ingroup);CHKERRQ(ierr);
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
    ierr = PetscFree2(inranks,outranks);CHKERRQ(ierr);
    ierr = PetscSFDestroy(&bgcount);CHKERRQ(ierr);
  }
  *incoming = sf->ingroup;

  if (sf->outgroup == MPI_GROUP_NULL) {
    ierr = MPI_Comm_group(PetscObjectComm((PetscObject)sf),&group);CHKERRQ(ierr);
    ierr = MPI_Group_incl(group,sf->nranks,sf->ranks,&sf->outgroup);CHKERRQ(ierr);
    ierr = MPI_Group_free(&group);CHKERRQ(ierr);
  }
  *outgoing = sf->outgroup;
  PetscFunctionReturn(0);
}

/*@
   PetscSFGetMultiSF - gets the inner SF implementing gathers and scatters

   Collective

   Input Argument:
.  sf - star forest that may contain roots with 0 or with more than 1 vertex

   Output Arguments:
.  multi - star forest with split roots, such that each root has degree exactly 1

   Level: developer

   Notes:
   In most cases, users should use PetscSFGatherBegin() and PetscSFScatterBegin() instead of manipulating multi
   directly. Since multi satisfies the stronger condition that each entry in the global space has exactly one incoming
   edge, it is a candidate for future optimization that might involve its removal.

.seealso: PetscSFSetGraph(), PetscSFGatherBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGetMultiSF(PetscSF sf,PetscSF *multi)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(multi,2);
  if (sf->nroots < 0) {         /* Graph has not been set yet; why do we need this? */
    ierr   = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);CHKERRQ(ierr);
    *multi = sf->multi;
    PetscFunctionReturn(0);
  }
  if (!sf->multi) {
    const PetscInt *indegree;
    PetscInt       i,*inoffset,*outones,*outoffset,maxlocal;
    PetscSFNode    *remote;
    maxlocal = sf->maxleaf+1; /* TODO: We should use PetscSFGetLeafRange() */
    ierr = PetscSFComputeDegreeBegin(sf,&indegree);CHKERRQ(ierr);
    ierr = PetscSFComputeDegreeEnd(sf,&indegree);CHKERRQ(ierr);
    ierr = PetscMalloc3(sf->nroots+1,&inoffset,maxlocal,&outones,maxlocal,&outoffset);CHKERRQ(ierr);
    inoffset[0] = 0;
    for (i=0; i<sf->nroots; i++) inoffset[i+1] = inoffset[i] + indegree[i];
    for (i=0; i<maxlocal; i++) outones[i] = 1;
    ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,inoffset,outones,outoffset,MPI_SUM);CHKERRQ(ierr);
    for (i=0; i<sf->nroots; i++) inoffset[i] -= indegree[i]; /* Undo the increment */
#if 0
#if defined(PETSC_USE_DEBUG)                                 /* Check that the expected number of increments occurred */
    for (i=0; i<sf->nroots; i++) {
      if (inoffset[i] + indegree[i] != inoffset[i+1]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Incorrect result after PetscSFFetchAndOp");
    }
#endif
#endif
    ierr = PetscMalloc1(sf->nleaves,&remote);CHKERRQ(ierr);
    for (i=0; i<sf->nleaves; i++) {
      remote[i].rank  = sf->remote[i].rank;
      remote[i].index = outoffset[sf->mine ? sf->mine[i] : i];
    }
    ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,&sf->multi);CHKERRQ(ierr);
    ierr = PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
    if (sf->rankorder) {        /* Sort the ranks */
      PetscMPIInt rank;
      PetscInt    *inranks,*newoffset,*outranks,*newoutoffset,*tmpoffset,maxdegree;
      PetscSFNode *newremote;
      ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)sf),&rank);CHKERRQ(ierr);
      for (i=0,maxdegree=0; i<sf->nroots; i++) maxdegree = PetscMax(maxdegree,indegree[i]);
      ierr = PetscMalloc5(sf->multi->nroots,&inranks,sf->multi->nroots,&newoffset,maxlocal,&outranks,maxlocal,&newoutoffset,maxdegree,&tmpoffset);CHKERRQ(ierr);
      for (i=0; i<maxlocal; i++) outranks[i] = rank;
      ierr = PetscSFReduceBegin(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
      ierr = PetscSFReduceEnd(sf->multi,MPIU_INT,outranks,inranks,MPIU_REPLACE);CHKERRQ(ierr);
      /* Sort the incoming ranks at each vertex, build the inverse map */
      for (i=0; i<sf->nroots; i++) {
        PetscInt j;
        for (j=0; j<indegree[i]; j++) tmpoffset[j] = j;
        ierr = PetscSortIntWithArray(indegree[i],inranks+inoffset[i],tmpoffset);CHKERRQ(ierr);
        for (j=0; j<indegree[i]; j++) newoffset[inoffset[i] + tmpoffset[j]] = inoffset[i] + j;
      }
      ierr = PetscSFBcastBegin(sf->multi,MPIU_INT,newoffset,newoutoffset);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(sf->multi,MPIU_INT,newoffset,newoutoffset);CHKERRQ(ierr);
      ierr = PetscMalloc1(sf->nleaves,&newremote);CHKERRQ(ierr);
      for (i=0; i<sf->nleaves; i++) {
        newremote[i].rank  = sf->remote[i].rank;
        newremote[i].index = newoutoffset[sf->mine ? sf->mine[i] : i];
      }
      ierr = PetscSFSetGraph(sf->multi,inoffset[sf->nroots],sf->nleaves,sf->mine,PETSC_COPY_VALUES,newremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
      ierr = PetscFree5(inranks,newoffset,outranks,newoutoffset,tmpoffset);CHKERRQ(ierr);
    }
    ierr = PetscFree3(inoffset,outones,outoffset);CHKERRQ(ierr);
  }
  *multi = sf->multi;
  PetscFunctionReturn(0);
}

/*@C
   PetscSFCreateEmbeddedSF - removes edges from all but the selected roots, does not remap indices

   Collective

   Input Arguments:
+  sf - original star forest
.  nroots - number of roots to select on this process
-  selected - selected roots on this process

   Output Arguments:
.  newsf - new star forest

   Level: advanced

   Note:
   To use the new PetscSF, it may be necessary to know the indices of the leaves that are still participating. This can
   be done by calling PetscSFGetGraph().

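   Example Usage:
   A minimal sketch (assumptions for illustration: this process selects roots 0
   and 2 of an existing sf):
.vb
   PetscInt selected[2] = {0,2};
   PetscSF  esf;

   ierr = PetscSFCreateEmbeddedSF(sf,2,selected,&esf);CHKERRQ(ierr);
   /* esf keeps only the edges whose roots were selected; indices are unchanged */
   ierr = PetscSFDestroy(&esf);CHKERRQ(ierr);
.ve
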
.seealso: PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFCreateEmbeddedSF(PetscSF sf,PetscInt nroots,const PetscInt *selected,PetscSF *newsf)
{
  PetscInt      *rootdata, *leafdata, *ilocal;
  PetscSFNode   *iremote;
  PetscInt       leafsize = 0, nleaves = 0, n, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  if (nroots) PetscValidPointer(selected,3);
  PetscValidPointer(newsf,4);
  if (sf->mine) for (i = 0; i < sf->nleaves; ++i) {leafsize = PetscMax(leafsize, sf->mine[i]+1);}
  else leafsize = sf->nleaves;
  ierr = PetscCalloc2(sf->nroots,&rootdata,leafsize,&leafdata);CHKERRQ(ierr);
  for (i=0; i<nroots; ++i) rootdata[selected[i]] = 1;
  ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  for (i = 0; i < leafsize; ++i) nleaves += leafdata[i];
  ierr = PetscMalloc1(nleaves,&ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&iremote);CHKERRQ(ierr);
  for (i = 0, n = 0; i < sf->nleaves; ++i) {
    const PetscInt lidx = sf->mine ? sf->mine[i] : i;

    if (leafdata[lidx]) {
      ilocal[n]        = lidx;
      iremote[n].rank  = sf->remote[i].rank;
      iremote[n].index = sf->remote[i].index;
      ++n;
    }
  }
  if (n != nleaves) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_PLIB, "There is a size mismatch in the SF embedding, %d != %d", n, nleaves);
  ierr = PetscSFDuplicate(sf,PETSCSF_DUPLICATE_RANKS,newsf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*newsf,sf->nroots,nleaves,ilocal,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscFree2(rootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
  PetscSFCreateEmbeddedLeafSF - removes edges from all but the selected leaves, does not remap indices

  Collective

  Input Arguments:
+ sf - original star forest
. nleaves - number of leaves to select on this process
- selected - selected leaves on this process

  Output Arguments:
.  newsf - new star forest

  Level: advanced

.seealso: PetscSFCreateEmbeddedSF(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFCreateEmbeddedLeafSF(PetscSF sf, PetscInt nleaves, const PetscInt *selected, PetscSF *newsf)
{
  PetscSFNode   *iremote;
  PetscInt      *ilocal;
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
  PetscSFCheckGraphSet(sf, 1);
  if (nleaves) PetscValidPointer(selected, 3);
  PetscValidPointer(newsf, 4);
  ierr = PetscMalloc1(nleaves, &ilocal);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &iremote);CHKERRQ(ierr);
  for (i = 0; i < nleaves; ++i) {
    const PetscInt l = selected[i];

    ilocal[i]        = sf->mine ? sf->mine[l] : l;
    iremote[i].rank  = sf->remote[l].rank;
    iremote[i].index = sf->remote[l].index;
  }
  ierr = PetscSFDuplicate(sf, PETSCSF_DUPLICATE_RANKS, newsf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*newsf, sf->nroots, nleaves, ilocal, PETSC_OWN_POINTER, iremote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFBcastBegin - begin pointwise broadcast to be concluded with call to PetscSFBcastEnd()

   Collective on PetscSF

   Input Arguments:
+  sf - star forest on which to communicate
.  unit - data type associated with each node
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

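   Example Usage:
   A minimal sketch of the begin/end pairing (assumptions for illustration: sf has
   its graph set with two roots and two leaves on this process):
.vb
   PetscInt rootdata[2] = {10,20},leafdata[2];

   ierr = PetscSFBcastBegin(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
   /* independent local work can overlap the communication here */
   ierr = PetscSFBcastEnd(sf,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
.ve
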
.seealso: PetscSFCreate(), PetscSFSetGraph(), PetscSFView(), PetscSFBcastEnd(), PetscSFReduceBegin()
@*/
PetscErrorCode PetscSFBcastBegin(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_BcastBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = (*sf->ops->BcastBegin)(sf,unit,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_BcastBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFBcastEnd - end a broadcast operation started with PetscSFBcastBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  rootdata - buffer to broadcast

   Output Arguments:
.  leafdata - buffer to update with values from each leaf's respective root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFReduceEnd()
@*/
PetscErrorCode PetscSFBcastEnd(PetscSF sf,MPI_Datatype unit,const void *rootdata,void *leafdata)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_BcastEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = (*sf->ops->BcastEnd)(sf,unit,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_BcastEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFReduceBegin - begin reduction of leafdata into rootdata, to be completed with call to PetscSFReduceEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - values to reduce
-  op - reduction operation

   Output Arguments:
.  rootdata - result of reduction of values from all leaves of each root

   Level: intermediate

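   Example Usage:
   A minimal sketch of summing leaf contributions into roots, e.g. when adding
   ghost-point contributions to their owners (buffers assumed appropriately sized):
.vb
   ierr = PetscSFReduceBegin(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);CHKERRQ(ierr);
   ierr = PetscSFReduceEnd(sf,MPIU_INT,leafdata,rootdata,MPI_SUM);CHKERRQ(ierr);
.ve
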
.seealso: PetscSFBcastBegin()
@*/
PetscErrorCode PetscSFReduceBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_ReduceBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = (sf->ops->ReduceBegin)(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_ReduceBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFReduceEnd - end a reduction operation started with PetscSFReduceBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - values to reduce
-  op - reduction operation

   Output Arguments:
.  rootdata - result of reduction of values from all leaves of each root

   Level: intermediate

.seealso: PetscSFSetGraph(), PetscSFBcastEnd()
@*/
PetscErrorCode PetscSFReduceEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *rootdata,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_ReduceEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = (*sf->ops->ReduceEnd)(sf,unit,leafdata,rootdata,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_ReduceEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFComputeDegreeBegin - begin computation of degree for each root vertex, to be completed with PetscSFComputeDegreeEnd()

   Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
.  degree - degree of each root vertex

   Level: advanced

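   Example Usage:
   A minimal sketch; the degree array is owned by sf and must not be freed:
.vb
   const PetscInt *degree;

   ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
   ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
   /* degree[i] now holds the number of leaves referencing root i */
.ve
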
.seealso: PetscSFGatherBegin()
@*/
PetscErrorCode PetscSFComputeDegreeBegin(PetscSF sf,const PetscInt **degree)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  PetscValidPointer(degree,2);
  if (!sf->degreeknown) {
    PetscInt i, nroots = sf->nroots, maxlocal = sf->maxleaf+1;  /* TODO: We should use PetscSFGetLeafRange() */
    if (sf->degree) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Calls to PetscSFComputeDegreeBegin() cannot be nested.");
    ierr = PetscMalloc1(nroots,&sf->degree);CHKERRQ(ierr);
    ierr = PetscMalloc1(PetscMax(maxlocal,1),&sf->degreetmp);CHKERRQ(ierr); /* allocate at least one entry, see check in PetscSFComputeDegreeEnd() */
    for (i=0; i<nroots; i++) sf->degree[i] = 0;
    for (i=0; i<maxlocal; i++) sf->degreetmp[i] = 1;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);CHKERRQ(ierr);
  }
  *degree = NULL;
  PetscFunctionReturn(0);
}

/*@C
   PetscSFComputeDegreeEnd - complete computation of degree for each root vertex, started with PetscSFComputeDegreeBegin()

   Collective

   Input Arguments:
.  sf - star forest

   Output Arguments:
.  degree - degree of each root vertex

   Level: developer

.seealso: PetscSFComputeDegreeBegin()
@*/
PetscErrorCode PetscSFComputeDegreeEnd(PetscSF sf,const PetscInt **degree)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscSFCheckGraphSet(sf,1);
  PetscValidPointer(degree,2);
  if (!sf->degreeknown) {
    if (!sf->degreetmp) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Must call PetscSFComputeDegreeBegin() before PetscSFComputeDegreeEnd()");
    ierr = PetscSFReduceEnd(sf,MPIU_INT,sf->degreetmp,sf->degree,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscFree(sf->degreetmp);CHKERRQ(ierr);
    sf->degreeknown = PETSC_TRUE;
  }
  *degree = sf->degree;
  PetscFunctionReturn(0);
}

/*@C
   PetscSFFetchAndOpBegin - begin operation that fetches values from root and updates atomically by applying operation using my leaf value, to be completed with PetscSFFetchAndOpEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - leaf values to use in reduction
-  op - operation to use for reduction

   Output Arguments:
+  rootdata - root values to be updated, input state is seen by first process to perform an update
-  leafupdate - state at each leaf's respective root immediately prior to my atomic update

   Level: advanced

   Note:
   The update is only atomic at the granularity provided by the hardware. Different roots referenced by the same process
   might be updated in a different order. Furthermore, if a composite type is used for the unit datatype, atomicity is
   not guaranteed across the whole vertex. Therefore, this function is mostly only used with primitive types such as
   integers.

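   Example Usage:
   A minimal sketch of assigning each leaf a unique offset at its root, the
   pattern used internally by PetscSFGetMultiSF() (rootdata, leafdata, and
   leafupdate assumed sized for this process's roots and leaves):
.vb
   /* Each leaf fetches the current root value into leafupdate and atomically
      adds its contribution in leafdata to the root */
   ierr = PetscSFFetchAndOpBegin(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);CHKERRQ(ierr);
   ierr = PetscSFFetchAndOpEnd(sf,MPIU_INT,rootdata,leafdata,leafupdate,MPI_SUM);CHKERRQ(ierr);
.ve
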
.seealso: PetscSFComputeDegreeBegin(), PetscSFReduceBegin(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFFetchAndOpBegin(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_FetchAndOpBegin,sf,0,0,0);CHKERRQ(ierr);
  ierr = (*sf->ops->FetchAndOpBegin)(sf,unit,rootdata,leafdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_FetchAndOpBegin,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFFetchAndOpEnd - end operation started in matching call to PetscSFFetchAndOpBegin() to fetch values from roots and update atomically by applying operation using my leaf value

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
.  leafdata - leaf values to use in reduction
-  op - operation to use for reduction

   Output Arguments:
+  rootdata - root values to be updated, input state is seen by first process to perform an update
-  leafupdate - state at each leaf's respective root immediately prior to my atomic update

   Level: advanced

.seealso: PetscSFComputeDegreeEnd(), PetscSFReduceEnd(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFFetchAndOpEnd(PetscSF sf,MPI_Datatype unit,void *rootdata,const void *leafdata,void *leafupdate,MPI_Op op)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSCSF_FetchAndOpEnd,sf,0,0,0);CHKERRQ(ierr);
  ierr = (*sf->ops->FetchAndOpEnd)(sf,unit,rootdata,leafdata,leafupdate,op);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_FetchAndOpEnd,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFGatherBegin - begin pointwise gather of all leaves into multi-roots, to be completed with PetscSFGatherEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  leafdata - leaf data to gather to roots

   Output Argument:
.  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

   Level: intermediate

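   Example Usage:
   A minimal sketch of sizing the multi-root buffer from the root degrees and
   gathering into it (leafdata assumed to exist with one entry per leaf):
.vb
   const PetscInt *degree;
   PetscInt       *multirootdata,nroots,nmulti = 0,i;

   ierr = PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);CHKERRQ(ierr);
   ierr = PetscSFComputeDegreeBegin(sf,&degree);CHKERRQ(ierr);
   ierr = PetscSFComputeDegreeEnd(sf,&degree);CHKERRQ(ierr);
   for (i=0; i<nroots; i++) nmulti += degree[i]; /* one slot per incoming leaf */
   ierr = PetscMalloc1(nmulti,&multirootdata);CHKERRQ(ierr);
   ierr = PetscSFGatherBegin(sf,MPIU_INT,leafdata,multirootdata);CHKERRQ(ierr);
   ierr = PetscSFGatherEnd(sf,MPIU_INT,leafdata,multirootdata);CHKERRQ(ierr);
   ierr = PetscFree(multirootdata);CHKERRQ(ierr);
.ve
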
.seealso: PetscSFComputeDegreeBegin(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFGatherBegin(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFGatherEnd - ends pointwise gather operation that was started with PetscSFGatherBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  leafdata - leaf data to gather to roots

   Output Argument:
.  multirootdata - root buffer to gather into, amount of space per root is equal to its degree

   Level: intermediate

.seealso: PetscSFComputeDegreeEnd(), PetscSFScatterEnd()
@*/
PetscErrorCode PetscSFGatherEnd(PetscSF sf,MPI_Datatype unit,const void *leafdata,void *multirootdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(multi,unit,leafdata,multirootdata,MPIU_REPLACE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFScatterBegin - begin pointwise scatter operation from multi-roots to leaves, to be completed with PetscSFScatterEnd()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  multirootdata - root buffer to send to each leaf, one unit of data per leaf

   Output Argument:
.  leafdata - leaf data to be updated with personal data from each respective root

   Level: intermediate

.seealso: PetscSFComputeDegreeBegin(), PetscSFScatterEnd()
@*/
PetscErrorCode PetscSFScatterBegin(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(multi,unit,multirootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PetscSFScatterEnd - ends pointwise scatter operation that was started with PetscSFScatterBegin()

   Collective

   Input Arguments:
+  sf - star forest
.  unit - data type
-  multirootdata - root buffer to send to each leaf, one unit of data per leaf

   Output Argument:
.  leafdata - leaf data to be updated with personal data from each respective root

   Level: intermediate

.seealso: PetscSFComputeDegreeEnd(), PetscSFScatterBegin()
@*/
PetscErrorCode PetscSFScatterEnd(PetscSF sf,MPI_Datatype unit,const void *multirootdata,void *leafdata)
{
  PetscErrorCode ierr;
  PetscSF        multi;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscSFSetUp(sf);CHKERRQ(ierr);
  ierr = PetscSFGetMultiSF(sf,&multi);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(multi,unit,multirootdata,leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PetscSFCompose - Compose a new PetscSF equivalent to the action of two PetscSFs applied in sequence

  Input Parameters:
+ sfA - The first PetscSF
- sfB - The second PetscSF

  Output Parameters:
. sfBA - equivalent PetscSF for applying A then B

  Level: developer

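  Example Usage:
  A minimal sketch (assumptions for illustration: the roots of sfB correspond to
  the leaves of sfA, so broadcasting through sfBA is equivalent to broadcasting
  through sfA and then through sfB):
.vb
  ierr = PetscSFCompose(sfA,sfB,&sfBA);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sfBA,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sfBA,MPIU_INT,rootdata,leafdata);CHKERRQ(ierr);
.ve
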
.seealso: PetscSF, PetscSFGetGraph(), PetscSFSetGraph()
@*/
PetscErrorCode PetscSFCompose(PetscSF sfA, PetscSF sfB, PetscSF *sfBA)
{
  MPI_Comm           comm;
  const PetscSFNode *remotePointsA, *remotePointsB;
  PetscSFNode       *remotePointsBA;
  const PetscInt    *localPointsA, *localPointsB;
  PetscInt           numRootsA, numLeavesA, numRootsB, numLeavesB;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sfA, PETSCSF_CLASSID, 1);
  PetscSFCheckGraphSet(sfA, 1);
  PetscValidHeaderSpecific(sfB, PETSCSF_CLASSID, 2);
  PetscSFCheckGraphSet(sfB, 2);
  PetscValidPointer(sfBA, 3);
  ierr = PetscObjectGetComm((PetscObject) sfA, &comm);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfA, &numRootsA, &numLeavesA, &localPointsA, &remotePointsA);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sfB, &numRootsB, &numLeavesB, &localPointsB, &remotePointsB);CHKERRQ(ierr);
  ierr = PetscMalloc1(numLeavesB, &remotePointsBA);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sfB, MPIU_2INT, remotePointsA, remotePointsBA);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sfB, MPIU_2INT, remotePointsA, remotePointsBA);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, sfBA);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(*sfBA, numRootsA, numLeavesB, localPointsB, PETSC_COPY_VALUES, remotePointsBA, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}