xref: /petsc/src/vec/is/sf/utils/sfutils.c (revision efa12513287cff49a2b9648ae83199dcbfaad71a)
1 #include <petsc/private/sfimpl.h>       /*I  "petscsf.h"   I*/
2 #include <petsc/private/sectionimpl.h>
3 
/*@C
   PetscSFSetGraphLayout - Set a parallel star forest via global indices and a PetscLayout

   Collective

   Input Arguments:
+  sf - star forest
.  layout - PetscLayout defining the global space
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
-  iremote - remote locations of root vertices for each leaf on the current process

   Level: intermediate

   Developers Note: Local indices which are the identity permutation in the range [0,nleaves) are discarded as they
   encode contiguous storage. In such case, if localmode is PETSC_OWN_POINTER, the memory is deallocated as it is not
   needed

.seealso: PetscSFCreate(), PetscSFView(), PetscSFSetGraph(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraphLayout(PetscSF sf,PetscLayout layout,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscInt *iremote)
{
  PetscErrorCode ierr;
  const PetscInt *range;
  PetscInt       i, nroots, ls = -1, ln = -1;
  PetscMPIInt    lr = -1;
  PetscSFNode    *remote;

  PetscFunctionBegin;
  ierr = PetscLayoutGetLocalSize(layout,&nroots);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout,&range);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves,&remote);CHKERRQ(ierr);
  /* Seed ls so the first iteration computes idx = -1 and always falls into the
     search branch below, establishing a valid cached owner range */
  if (nleaves) { ls = iremote[0] + 1; }
  for (i=0; i<nleaves; i++) {
    /* Offset of this global index within the cached owner's range [ls, ls+ln) */
    const PetscInt idx = iremote[i] - ls;
    if (idx < 0 || idx >= ln) { /* short-circuit the search */
      ierr = PetscLayoutFindOwnerIndex(layout,iremote[i],&lr,&remote[i].index);CHKERRQ(ierr);
      remote[i].rank = lr;
      /* Cache the owning rank's range so consecutive indices owned by the same
         rank avoid the binary search inside PetscLayoutFindOwnerIndex */
      ls = range[lr];
      ln = range[lr+1] - ls;
    } else {
      /* Cache hit: same owner as the previous index */
      remote[i].rank  = lr;
      remote[i].index = idx;
    }
  }
  ierr = PetscSFSetGraph(sf,nroots,nleaves,ilocal,localmode,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
53 
54 /*@
55   PetscSFSetGraphSection - Sets the PetscSF graph encoding the parallel dof overlap based upon the PetscSections
56   describing the data layout.
57 
58   Input Parameters:
59 + sf - The SF
60 . localSection - PetscSection describing the local data layout
61 - globalSection - PetscSection describing the global data layout
62 
63   Level: developer
64 
65 .seealso: PetscSFSetGraph(), PetscSFSetGraphLayout()
66 @*/
67 PetscErrorCode PetscSFSetGraphSection(PetscSF sf, PetscSection localSection, PetscSection globalSection)
68 {
69   MPI_Comm       comm;
70   PetscLayout    layout;
71   const PetscInt *ranges;
72   PetscInt       *local;
73   PetscSFNode    *remote;
74   PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
75   PetscMPIInt    size, rank;
76   PetscErrorCode ierr;
77 
78   PetscFunctionBegin;
79   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
80   PetscValidHeaderSpecific(localSection, PETSC_SECTION_CLASSID, 2);
81   PetscValidHeaderSpecific(globalSection, PETSC_SECTION_CLASSID, 3);
82 
83   ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
84   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
85   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
86   ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
87   ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
88   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
89   ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
90   ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
91   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
92   ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
93   for (p = pStart; p < pEnd; ++p) {
94     PetscInt gdof, gcdof;
95 
96     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
97     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
98     if (gcdof > (gdof < 0 ? -(gdof+1) : gdof)) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %D has %D constraints > %D dof", p, gcdof, (gdof < 0 ? -(gdof+1) : gdof));
99     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
100   }
101   ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr);
102   ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
103   for (p = pStart, l = 0; p < pEnd; ++p) {
104     const PetscInt *cind;
105     PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
106 
107     ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
108     ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
109     ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
110     ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
111     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
112     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
113     ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
114     if (!gdof) continue; /* Censored point */
115     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
116     if (gsize != dof-cdof) {
117       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %D for point %D is neither the constrained size %D, nor the unconstrained %D", gsize, p, dof-cdof, dof);
118       cdof = 0; /* Ignore constraints */
119     }
120     for (d = 0, c = 0; d < dof; ++d) {
121       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
122       local[l+d-c] = off+d;
123     }
124     if (gdof < 0) {
125       for (d = 0; d < gsize; ++d, ++l) {
126         PetscInt offset = -(goff+1) + d, r;
127 
128         ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr);
129         if (r < 0) r = -(r+2);
130         if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %D mapped to invalid process %D (%D, %D)", p, r, gdof, goff);
131         remote[l].rank  = r;
132         remote[l].index = offset - ranges[r];
133       }
134     } else {
135       for (d = 0; d < gsize; ++d, ++l) {
136         remote[l].rank  = rank;
137         remote[l].index = goff+d - ranges[rank];
138       }
139     }
140   }
141   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %D != nleaves %D", l, nleaves);
142   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
143   ierr = PetscSFSetGraph(sf, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
144   PetscFunctionReturn(0);
145 }
146 
147 static PetscErrorCode PetscSectionCheckConstraints_Static(PetscSection s)
148 {
149   PetscErrorCode ierr;
150 
151   PetscFunctionBegin;
152   if (!s->bc) {
153     ierr = PetscSectionCreate(PETSC_COMM_SELF, &s->bc);CHKERRQ(ierr);
154     ierr = PetscSectionSetChart(s->bc, s->pStart, s->pEnd);CHKERRQ(ierr);
155   }
156   PetscFunctionReturn(0);
157 }
158 
/*@C
  PetscSFDistributeSection - Create a new PetscSection reorganized, moving from the root to the leaves of the SF

  Collective on sf

  Input Parameters:
+ sf - The SF
- rootSection - Section defined on root space

  Output Parameters:
+ remoteOffsets - root offsets in leaf storage, or NULL
- leafSection - Section defined on the leaf space

  Level: advanced

.seealso: PetscSFCreate()
@*/
PetscErrorCode PetscSFDistributeSection(PetscSF sf, PetscSection rootSection, PetscInt **remoteOffsets, PetscSection leafSection)
{
  PetscSF        embedSF;
  const PetscInt *indices;
  IS             selected;
  PetscInt       numFields, nroots, rpStart, rpEnd, lpStart = PETSC_MAX_INT, lpEnd = -1, f, c;
  PetscBool      *sub, hasc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(PETSCSF_DistSect,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionGetNumFields(rootSection, &numFields);CHKERRQ(ierr);
  if (numFields) {ierr = PetscSectionSetNumFields(leafSection, numFields);CHKERRQ(ierr);}
  /* sub[0]: whether an embedded SF is needed; sub[1]: whether the section has
     constraints; sub[2+f]: whether field f has constraints.  Reduced with
     MPI_LOR below so all ranks agree. */
  ierr = PetscMalloc1(numFields+2, &sub);CHKERRQ(ierr);
  sub[1] = rootSection->bc ? PETSC_TRUE : PETSC_FALSE;
  /* Copy per-field metadata (components, names, symmetries) to the leaf section */
  for (f = 0; f < numFields; ++f) {
    PetscSectionSym sym;
    const char      *name   = NULL;
    PetscInt        numComp = 0;

    sub[2 + f] = rootSection->field[f]->bc ? PETSC_TRUE : PETSC_FALSE;
    ierr = PetscSectionGetFieldComponents(rootSection, f, &numComp);CHKERRQ(ierr);
    ierr = PetscSectionGetFieldName(rootSection, f, &name);CHKERRQ(ierr);
    ierr = PetscSectionGetFieldSym(rootSection, f, &sym);CHKERRQ(ierr);
    ierr = PetscSectionSetFieldComponents(leafSection, f, numComp);CHKERRQ(ierr);
    ierr = PetscSectionSetFieldName(leafSection, f, name);CHKERRQ(ierr);
    ierr = PetscSectionSetFieldSym(leafSection, f, sym);CHKERRQ(ierr);
    for (c = 0; c < rootSection->numFieldComponents[f]; ++c) {
      ierr = PetscSectionGetComponentName(rootSection, f, c, &name);CHKERRQ(ierr);
      ierr = PetscSectionSetComponentName(leafSection, f, c, name);CHKERRQ(ierr);
    }
  }
  ierr = PetscSectionGetChart(rootSection, &rpStart, &rpEnd);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf,&nroots,NULL,NULL,NULL);CHKERRQ(ierr);
  /* Points beyond nroots cannot be roots of sf, so clip the root chart (and keep
     it non-degenerate) */
  rpEnd = PetscMin(rpEnd,nroots);
  rpEnd = PetscMax(rpStart,rpEnd);
  /* see if we can avoid creating the embedded SF, since it can cost more than an allreduce */
  sub[0] = (PetscBool)(nroots != rpEnd - rpStart);
  ierr = MPIU_Allreduce(MPI_IN_PLACE, sub, 2+numFields, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
  if (sub[0]) {
    /* Restrict sf to the roots lying inside the (clipped) root chart */
    ierr = ISCreateStride(PETSC_COMM_SELF, rpEnd - rpStart, rpStart, 1, &selected);CHKERRQ(ierr);
    ierr = ISGetIndices(selected, &indices);CHKERRQ(ierr);
    ierr = PetscSFCreateEmbeddedSF(sf, rpEnd - rpStart, indices, &embedSF);CHKERRQ(ierr);
    ierr = ISRestoreIndices(selected, &indices);CHKERRQ(ierr);
    ierr = ISDestroy(&selected);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)sf);CHKERRQ(ierr);
    embedSF = sf;
  }
  ierr = PetscSFGetLeafRange(embedSF, &lpStart, &lpEnd);CHKERRQ(ierr);
  lpEnd++;

  ierr = PetscSectionSetChart(leafSection, lpStart, lpEnd);CHKERRQ(ierr);

  /* Constrained dof section */
  hasc = sub[1];
  for (f = 0; f < numFields; ++f) hasc = (PetscBool)(hasc || sub[2+f]);

  /* Could fuse these at the cost of copies and extra allocation */
  /* The &array[-rpStart] / &array[-lpStart] shifts let the SF index the buffers
     directly by point number (atlas arrays start at the section's chart start) */
  ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->atlasDof[-rpStart], &leafSection->atlasDof[-lpStart]);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->atlasDof[-rpStart], &leafSection->atlasDof[-lpStart]);CHKERRQ(ierr);
  if (sub[1]) {
    ierr = PetscSectionCheckConstraints_Static(rootSection);CHKERRQ(ierr);
    ierr = PetscSectionCheckConstraints_Static(leafSection);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->bc->atlasDof[-rpStart], &leafSection->bc->atlasDof[-lpStart]);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->bc->atlasDof[-rpStart], &leafSection->bc->atlasDof[-lpStart]);CHKERRQ(ierr);
  }
  /* Distribute per-field dof counts and, where present, per-field constraint counts */
  for (f = 0; f < numFields; ++f) {
    ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->field[f]->atlasDof[-rpStart], &leafSection->field[f]->atlasDof[-lpStart]);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->field[f]->atlasDof[-rpStart], &leafSection->field[f]->atlasDof[-lpStart]);CHKERRQ(ierr);
    if (sub[2+f]) {
      ierr = PetscSectionCheckConstraints_Static(rootSection->field[f]);CHKERRQ(ierr);
      ierr = PetscSectionCheckConstraints_Static(leafSection->field[f]);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->field[f]->bc->atlasDof[-rpStart], &leafSection->field[f]->bc->atlasDof[-lpStart]);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->field[f]->bc->atlasDof[-rpStart], &leafSection->field[f]->bc->atlasDof[-lpStart]);CHKERRQ(ierr);
    }
  }
  if (remoteOffsets) {
    /* Root offsets seen from each leaf point; caller frees this array */
    ierr = PetscMalloc1(lpEnd - lpStart, remoteOffsets);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->atlasOff[-rpStart], &(*remoteOffsets)[-lpStart]);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->atlasOff[-rpStart], &(*remoteOffsets)[-lpStart]);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(leafSection);CHKERRQ(ierr);
  if (hasc) { /* need to communicate bcIndices */
    PetscSF  bcSF;
    PetscInt *rOffBc;

    ierr = PetscMalloc1(lpEnd - lpStart, &rOffBc);CHKERRQ(ierr);
    if (sub[1]) {
      /* Build a dof-level SF over the bc sections, then broadcast the constraint indices */
      ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->bc->atlasOff[-rpStart], &rOffBc[-lpStart]);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->bc->atlasOff[-rpStart], &rOffBc[-lpStart]);CHKERRQ(ierr);
      ierr = PetscSFCreateSectionSF(embedSF, rootSection->bc, rOffBc, leafSection->bc, &bcSF);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(bcSF, MPIU_INT, rootSection->bcIndices, leafSection->bcIndices);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(bcSF, MPIU_INT, rootSection->bcIndices, leafSection->bcIndices);CHKERRQ(ierr);
      ierr = PetscSFDestroy(&bcSF);CHKERRQ(ierr);
    }
    /* Same procedure for each constrained field, reusing the rOffBc buffer */
    for (f = 0; f < numFields; ++f) {
      if (sub[2+f]) {
        ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->field[f]->bc->atlasOff[-rpStart], &rOffBc[-lpStart]);CHKERRQ(ierr);
        ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->field[f]->bc->atlasOff[-rpStart], &rOffBc[-lpStart]);CHKERRQ(ierr);
        ierr = PetscSFCreateSectionSF(embedSF, rootSection->field[f]->bc, rOffBc, leafSection->field[f]->bc, &bcSF);CHKERRQ(ierr);
        ierr = PetscSFBcastBegin(bcSF, MPIU_INT, rootSection->field[f]->bcIndices, leafSection->field[f]->bcIndices);CHKERRQ(ierr);
        ierr = PetscSFBcastEnd(bcSF, MPIU_INT, rootSection->field[f]->bcIndices, leafSection->field[f]->bcIndices);CHKERRQ(ierr);
        ierr = PetscSFDestroy(&bcSF);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree(rOffBc);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&embedSF);CHKERRQ(ierr);
  ierr = PetscFree(sub);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_DistSect,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
289 
/*@C
  PetscSFCreateRemoteOffsets - Create offsets for point data on remote processes

  Collective on sf

  Input Parameters:
+ sf          - The SF
. rootSection - Data layout of remote points for outgoing data (this is layout for SF roots)
- leafSection - Data layout of local points for incoming data  (this is layout for SF leaves)

  Output Parameter:
. remoteOffsets - Offsets for point data on remote processes (these are offsets from the root section), or NULL

  Level: developer

.seealso: PetscSFCreate()
@*/
PetscErrorCode PetscSFCreateRemoteOffsets(PetscSF sf, PetscSection rootSection, PetscSection leafSection, PetscInt **remoteOffsets)
{
  PetscSF         embedSF;
  const PetscInt *indices;
  IS              selected;
  PetscInt        numRoots, rpStart = 0, rpEnd = 0, lpStart = 0, lpEnd = 0;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  *remoteOffsets = NULL;
  ierr = PetscSFGetGraph(sf, &numRoots, NULL, NULL, NULL);CHKERRQ(ierr);
  /* An SF with no graph set reports numRoots < 0: nothing to communicate */
  if (numRoots < 0) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PETSCSF_RemoteOff,sf,0,0,0);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(rootSection, &rpStart, &rpEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &lpStart, &lpEnd);CHKERRQ(ierr);
  /* Restrict sf to the roots inside the root section's chart */
  ierr = ISCreateStride(PETSC_COMM_SELF, rpEnd - rpStart, rpStart, 1, &selected);CHKERRQ(ierr);
  ierr = ISGetIndices(selected, &indices);CHKERRQ(ierr);
  ierr = PetscSFCreateEmbeddedSF(sf, rpEnd - rpStart, indices, &embedSF);CHKERRQ(ierr);
  ierr = ISRestoreIndices(selected, &indices);CHKERRQ(ierr);
  ierr = ISDestroy(&selected);CHKERRQ(ierr);
  /* One offset per leaf point; zero for points the SF does not touch.
     The &array[-pStart] shifts let the SF index the buffers by point number. */
  ierr = PetscCalloc1(lpEnd - lpStart, remoteOffsets);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(embedSF, MPIU_INT, &rootSection->atlasOff[-rpStart], &(*remoteOffsets)[-lpStart]);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(embedSF, MPIU_INT, &rootSection->atlasOff[-rpStart], &(*remoteOffsets)[-lpStart]);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&embedSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_RemoteOff,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
334 
/*@C
  PetscSFCreateSectionSF - Create an expanded SF of dofs, assuming the input SF relates points

  Collective on sf

  Input Parameters:
+ sf - The SF
. rootSection - Data layout of remote points for outgoing data (this is usually the serial section)
. remoteOffsets - Offsets for point data on remote processes (these are offsets from the root section), or NULL
- leafSection - Data layout of local points for incoming data  (this is the distributed section)

  Output Parameters:
- sectionSF - The new SF

  Note: Either rootSection or remoteOffsets can be specified

  Level: advanced

.seealso: PetscSFCreate()
@*/
PetscErrorCode PetscSFCreateSectionSF(PetscSF sf, PetscSection rootSection, PetscInt remoteOffsets[], PetscSection leafSection, PetscSF *sectionSF)
{
  MPI_Comm          comm;
  const PetscInt    *localPoints;
  const PetscSFNode *remotePoints;
  PetscInt          lpStart, lpEnd;
  PetscInt          numRoots, numSectionRoots, numPoints, numIndices = 0;
  PetscInt          *localIndices;
  PetscSFNode       *remoteIndices;
  PetscInt          i, ind;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  PetscValidPointer(rootSection,2);
  /* Cannot check PetscValidIntPointer(remoteOffsets,3) because it can be NULL if sf does not reference any points in leafSection */
  PetscValidPointer(leafSection,4);
  PetscValidPointer(sectionSF,5);
  ierr = PetscObjectGetComm((PetscObject)sf,&comm);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, sectionSF);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(leafSection, &lpStart, &lpEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(rootSection, &numSectionRoots);CHKERRQ(ierr);
  ierr = PetscSFGetGraph(sf, &numRoots, &numPoints, &localPoints, &remotePoints);CHKERRQ(ierr);
  /* sf has no graph: return the (still empty) newly created SF */
  if (numRoots < 0) PetscFunctionReturn(0);
  ierr = PetscLogEventBegin(PETSCSF_SectSF,sf,0,0,0);CHKERRQ(ierr);
  /* First pass: count dofs over all leaf points inside the leaf section's chart */
  for (i = 0; i < numPoints; ++i) {
    PetscInt localPoint = localPoints ? localPoints[i] : i;
    PetscInt dof;

    if ((localPoint >= lpStart) && (localPoint < lpEnd)) {
      ierr = PetscSectionGetDof(leafSection, localPoint, &dof);CHKERRQ(ierr);
      numIndices += dof;
    }
  }
  ierr = PetscMalloc1(numIndices, &localIndices);CHKERRQ(ierr);
  ierr = PetscMalloc1(numIndices, &remoteIndices);CHKERRQ(ierr);
  /* Create new index graph */
  for (i = 0, ind = 0; i < numPoints; ++i) {
    PetscInt localPoint = localPoints ? localPoints[i] : i;
    PetscInt rank       = remotePoints[i].rank;

    if ((localPoint >= lpStart) && (localPoint < lpEnd)) {
      /* NOTE(review): remoteOffsets is dereferenced whenever a leaf point lies in
         leafSection's chart, so it may only be NULL when no such point exists —
         confirm against callers */
      PetscInt remoteOffset = remoteOffsets[localPoint-lpStart];
      PetscInt loff, dof, d;

      ierr = PetscSectionGetOffset(leafSection, localPoint, &loff);CHKERRQ(ierr);
      ierr = PetscSectionGetDof(leafSection, localPoint, &dof);CHKERRQ(ierr);
      /* Expand the point into one SF edge per dof: leaf index loff+d maps to
         root index remoteOffset+d on the same remote rank as the point */
      for (d = 0; d < dof; ++d, ++ind) {
        localIndices[ind]        = loff+d;
        remoteIndices[ind].rank  = rank;
        remoteIndices[ind].index = remoteOffset+d;
      }
    }
  }
  if (numIndices != ind) SETERRQ2(comm, PETSC_ERR_PLIB, "Inconsistency in indices, %D should be %D", ind, numIndices);
  ierr = PetscSFSetGraph(*sectionSF, numSectionRoots, numIndices, localIndices, PETSC_OWN_POINTER, remoteIndices, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetUp(*sectionSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(PETSCSF_SectSF,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
415 
/*@C
   PetscSFCreateFromLayouts - Creates a parallel star forest mapping two PetscLayout objects

   Collective

   Input Arguments:
+  rmap - PetscLayout defining the global root space
-  lmap - PetscLayout defining the global leaf space

   Output Arguments:
.  sf - The parallel star forest

   Level: intermediate

.seealso: PetscSFCreate(), PetscLayoutCreate(), PetscSFSetGraphLayout()
@*/
PetscErrorCode PetscSFCreateFromLayouts(PetscLayout rmap, PetscLayout lmap, PetscSF* sf)
{
  PetscErrorCode ierr;
  PetscInt       i,nroots,nleaves = 0;
  PetscInt       rN, lst, len;
  PetscMPIInt    owner = -1;
  PetscSFNode    *remote;
  MPI_Comm       rcomm = rmap->comm;
  MPI_Comm       lcomm = lmap->comm;
  PetscMPIInt    flg;

  PetscFunctionBegin;
  PetscValidPointer(sf,3);
  if (!rmap->setupcalled) SETERRQ(rcomm,PETSC_ERR_ARG_WRONGSTATE,"Root layout not setup");
  if (!lmap->setupcalled) SETERRQ(lcomm,PETSC_ERR_ARG_WRONGSTATE,"Leaf layout not setup");
  ierr = MPI_Comm_compare(rcomm,lcomm,&flg);CHKERRMPI(ierr);
  if (flg != MPI_CONGRUENT && flg != MPI_IDENT) SETERRQ(rcomm,PETSC_ERR_SUP,"cannot map two layouts with non-matching communicators");
  ierr = PetscSFCreate(rcomm,sf);CHKERRQ(ierr);
  ierr = PetscLayoutGetLocalSize(rmap,&nroots);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(rmap,&rN);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(lmap,&lst,&len);CHKERRQ(ierr);
  ierr = PetscMalloc1(len-lst,&remote);CHKERRQ(ierr);
  /* Each locally-held leaf index i (clipped to the root global size rN) becomes a
     leaf pointing at the root rank/offset owning i in rmap */
  for (i = lst; i < len && i < rN; i++) {
    /* Reuse the last found owner while i stays inside its range; owner starts at -1,
       in which case the range test (against rmap->range[0]) forces the first search */
    if (owner < -1 || i >= rmap->range[owner+1]) {
      ierr = PetscLayoutFindOwner(rmap,i,&owner);CHKERRQ(ierr);
    }
    remote[nleaves].rank  = owner;
    remote[nleaves].index = i - rmap->range[owner];
    nleaves++;
  }
  /* Only the first nleaves entries of remote are valid (nleaves <= len-lst), so the
     graph copies them and the oversized buffer is freed here */
  ierr = PetscSFSetGraph(*sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  ierr = PetscFree(remote);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
466 
467 /* TODO: handle nooffprocentries like MatZeroRowsMapLocal_Private, since this code is the same */
468 PetscErrorCode PetscLayoutMapLocal(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
469 {
470   PetscInt      *owners = map->range;
471   PetscInt       n      = map->n;
472   PetscSF        sf;
473   PetscInt      *lidxs,*work = NULL;
474   PetscSFNode   *ridxs;
475   PetscMPIInt    rank, p = 0;
476   PetscInt       r, len = 0;
477   PetscErrorCode ierr;
478 
479   PetscFunctionBegin;
480   if (on) *on = 0;              /* squelch -Wmaybe-uninitialized */
481   /* Create SF where leaves are input idxs and roots are owned idxs */
482   ierr = MPI_Comm_rank(map->comm,&rank);CHKERRMPI(ierr);
483   ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
484   for (r = 0; r < n; ++r) lidxs[r] = -1;
485   ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
486   for (r = 0; r < N; ++r) {
487     const PetscInt idx = idxs[r];
488     if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
489     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
490       ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
491     }
492     ridxs[r].rank = p;
493     ridxs[r].index = idxs[r] - owners[p];
494   }
495   ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
496   ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
497   ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
498   ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
499   if (ogidxs) { /* communicate global idxs */
500     PetscInt cum = 0,start,*work2;
501 
502     ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
503     ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
504     for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
505     ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRMPI(ierr);
506     start -= cum;
507     cum = 0;
508     for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
509     ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
510     ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
511     ierr = PetscFree(work2);CHKERRQ(ierr);
512   }
513   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
514   /* Compress and put in indices */
515   for (r = 0; r < n; ++r)
516     if (lidxs[r] >= 0) {
517       if (work) work[len] = work[r];
518       lidxs[len++] = r;
519     }
520   if (on) *on = len;
521   if (oidxs) *oidxs = lidxs;
522   if (ogidxs) *ogidxs = work;
523   PetscFunctionReturn(0);
524 }
525