xref: /petsc/src/binding/petsc4py/src/petsc4py/PETSc/SF.pyx (revision 5dafb6f581977b074a43082461f4a734d7d3976b)
# --------------------------------------------------------------------

class SFType(object):
    """The star forest types."""
    BASIC      = S_(PETSCSFBASIC)       # MPI two-sided point-to-point
    NEIGHBOR   = S_(PETSCSFNEIGHBOR)    # MPI-3 neighborhood collectives
    ALLGATHERV = S_(PETSCSFALLGATHERV)
    ALLGATHER  = S_(PETSCSFALLGATHER)
    GATHERV    = S_(PETSCSFGATHERV)
    GATHER     = S_(PETSCSFGATHER)
    ALLTOALL   = S_(PETSCSFALLTOALL)
    WINDOW     = S_(PETSCSFWINDOW)      # MPI one-sided (RMA) windows

# --------------------------------------------------------------------


cdef class SF(Object):
    """Star Forest object for communication.

    SF is used for setting up and managing the communication of certain
    entries of arrays and `Vec` between MPI processes.

    """

    Type = SFType

    def __cinit__(self):
        # Expose the underlying PetscSF handle through the generic
        # Object pointer so the base class can manage its lifetime.
        self.obj = <PetscObject*> &self.sf
        self.sf  = NULL

    def view(self, Viewer viewer=None) -> None:
        """View a star forest.

        Collective.

        Parameters
        ----------
        viewer
            A `Viewer` to display the graph.

        See Also
        --------
        petsc.PetscSFView

        """
        cdef PetscViewer vwr = NULL
        if viewer is not None: vwr = viewer.vwr
        CHKERR(PetscSFView(self.sf, vwr))

    def destroy(self) -> Self:
        """Destroy the star forest.

        Collective.

        See Also
        --------
        petsc.PetscSFDestroy

        """
        CHKERR(PetscSFDestroy(&self.sf))
        return self

    def create(self, comm: Comm | None = None) -> Self:
        """Create a star forest communication context.

        Collective.

        Parameters
        ----------
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc.PetscSFCreate

        """
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscSF newsf = NULL
        CHKERR(PetscSFCreate(ccomm, &newsf))
        # Release any previously held handle before taking ownership.
        CHKERR(PetscCLEAR(self.obj)); self.sf = newsf
        return self

    def setType(self, sf_type: Type | str) -> None:
        """Set the type of the star forest.

        Collective.

        Parameters
        ----------
        sf_type
            The star forest type.

        See Also
        --------
        petsc.PetscSFSetType

        """
        cdef PetscSFType cval = NULL
        sf_type = str2bytes(sf_type, &cval)
        CHKERR(PetscSFSetType(self.sf, cval))

    def getType(self) -> str:
        """Return the type name of the star forest.

        Not collective.

        See Also
        --------
        petsc.PetscSFGetType

        """
        cdef PetscSFType cval = NULL
        CHKERR(PetscSFGetType(self.sf, &cval))
        return bytes2str(cval)

    def setFromOptions(self) -> None:
        """Set options using the options database.

        Logically collective.

        See Also
        --------
        petsc_options, petsc.PetscSFSetFromOptions

        """
        CHKERR(PetscSFSetFromOptions(self.sf))

    def setUp(self) -> None:
        """Set up communication structures.

        Collective.

        See Also
        --------
        petsc.PetscSFSetUp

        """
        CHKERR(PetscSFSetUp(self.sf))

    def reset(self) -> None:
        """Reset a star forest so that different sizes or neighbors can be used.

        Collective.

        See Also
        --------
        petsc.PetscSFReset

        """
        CHKERR(PetscSFReset(self.sf))

    #

    def getGraph(self) -> tuple[int, ArrayInt, ArrayInt]:
        """Return star forest graph.

        Not collective.

        The number of leaves can be determined from the size of ``ilocal``.

        Returns
        -------
        nroots : int
            Number of root vertices on the current process (these are possible
            targets for other process to attach leaves).
        ilocal : ArrayInt
            Locations of leaves in leafdata buffers.
        iremote : ArrayInt
            Remote locations of root vertices for each leaf on the current
            process.

        See Also
        --------
        petsc.PetscSFGetGraph

        """
        cdef PetscInt nroots = 0, nleaves = 0
        cdef const PetscInt *ilocal = NULL
        cdef const PetscSFNode *iremote = NULL
        CHKERR(PetscSFGetGraph(self.sf, &nroots, &nleaves, &ilocal, &iremote))
        # A NULL ilocal means contiguous leaf storage: synthesize 0..nleaves-1.
        if ilocal == NULL:
            local = arange(0, nleaves, 1)
        else:
            local = array_i(nleaves, ilocal)
        # Each PetscSFNode is a (rank, index) pair of PetscInt; view the
        # flat buffer as an (nleaves, 2) array.
        remote = array_i(nleaves*2, <const PetscInt*>iremote)
        remote = remote.reshape(nleaves, 2)
        return toInt(nroots), local, remote

    def setGraph(self, nroots: int, local: Sequence[int], remote: Sequence[int]) -> None:
        """Set star forest graph.

        Collective.

        The number of leaves argument can be determined from the size of
        ``local`` and/or ``remote``.

        Parameters
        ----------
        nroots
            Number of root vertices on the current process (these are possible
            targets for other process to attach leaves).
        local
            Locations of leaves in leafdata buffers, pass `None` for contiguous
            storage.
        remote
            Remote locations of root vertices for each leaf on the current
            process. Should be ``2*nleaves`` long as (rank, index) pairs.

        See Also
        --------
        petsc.PetscSFSetGraph

        """
        cdef PetscInt cnroots = asInt(nroots)
        cdef PetscInt nleaves = 0
        cdef PetscInt nremote = 0
        cdef PetscInt *ilocal = NULL
        cdef PetscSFNode* iremote = NULL
        remote = iarray_i(remote, &nremote, <PetscInt**>&iremote)
        if local is not None:
            local = iarray_i(local, &nleaves, &ilocal)
            # remote holds a (rank, index) pair per leaf
            assert 2*nleaves == nremote
        else:
            # Contiguous leaf storage: leaf count is implied by remote.
            assert nremote % 2 == 0
            nleaves = nremote // 2
        CHKERR(PetscSFSetGraph(self.sf, cnroots, nleaves, ilocal, PETSC_COPY_VALUES, iremote, PETSC_COPY_VALUES))

    def setRankOrder(self, flag: bool) -> None:
        """Sort multi-points for gathers and scatters by rank order.

        Logically collective.

        Parameters
        ----------
        flag
            `True` to sort, `False` to skip sorting.

        See Also
        --------
        petsc.PetscSFSetRankOrder

        """
        cdef PetscBool bval = asBool(flag)
        CHKERR(PetscSFSetRankOrder(self.sf, bval))

    def getMulti(self) -> SF:
        """Return the inner SF implementing gathers and scatters.

        Collective.

        See Also
        --------
        petsc.PetscSFGetMultiSF

        """
        cdef SF sf = SF()
        CHKERR(PetscSFGetMultiSF(self.sf, &sf.sf))
        # The multi-SF is a borrowed reference; bump its refcount so the
        # returned wrapper owns it.
        CHKERR(PetscINCREF(sf.obj))
        return sf

    def createInverse(self) -> SF:
        """Create the inverse map.

        Collective.

        Create the inverse map given a PetscSF in which all vertices have
        degree 1.

        See Also
        --------
        petsc.PetscSFCreateInverseSF

        """
        cdef SF sf = SF()
        CHKERR(PetscSFCreateInverseSF(self.sf, &sf.sf))
        return sf

    def computeDegree(self) -> ArrayInt:
        """Compute and return the degree of each root vertex.

        Collective.

        See Also
        --------
        petsc.PetscSFComputeDegreeBegin, petsc.PetscSFComputeDegreeEnd

        """
        cdef const PetscInt *cdegree = NULL
        cdef PetscInt nroots = 0
        CHKERR(PetscSFComputeDegreeBegin(self.sf, &cdegree))
        CHKERR(PetscSFComputeDegreeEnd(self.sf, &cdegree))
        # The degree buffer is owned by the SF; copy it out sized by nroots.
        CHKERR(PetscSFGetGraph(self.sf, &nroots, NULL, NULL, NULL))
        degree = array_i(nroots, cdegree)
        return degree

    def createEmbeddedRootSF(self, selected: Sequence[int]) -> SF:
        """Remove edges from all but the selected roots.

        Collective.

        Does not remap indices.

        Parameters
        ----------
        selected
            Indices of the selected roots on this process.

        See Also
        --------
        petsc.PetscSFCreateEmbeddedRootSF

        """
        cdef PetscInt nroots = asInt(len(selected))
        cdef PetscInt *cselected = NULL
        selected = iarray_i(selected, &nroots, &cselected)
        cdef SF sf = SF()
        CHKERR(PetscSFCreateEmbeddedRootSF(self.sf, nroots, cselected, &sf.sf))
        return sf

    def createEmbeddedLeafSF(self, selected: Sequence[int]) -> SF:
        """Remove edges from all but the selected leaves.

        Collective.

        Does not remap indices.

        Parameters
        ----------
        selected
            Indices of the selected leaves on this process.

        See Also
        --------
        petsc.PetscSFCreateEmbeddedLeafSF

        """
        cdef PetscInt nleaves = asInt(len(selected))
        cdef PetscInt *cselected = NULL
        selected = iarray_i(selected, &nleaves, &cselected)
        cdef SF sf = SF()
        CHKERR(PetscSFCreateEmbeddedLeafSF(self.sf, nleaves, cselected, &sf.sf))
        return sf

    def createSectionSF(self, Section rootSection, remoteOffsets: Sequence[int] | None, Section leafSection) -> SF:
        """Create an expanded `SF` of DOFs.

        Collective.

        Assumes the input `SF` relates points.

        Parameters
        ----------
        rootSection
            Data layout of remote points for outgoing data (this is usually
            the serial section).
        remoteOffsets
            Offsets for point data on remote processes (these are offsets from
            the root section), or `None`.
        leafSection
            Data layout of local points for incoming data (this is the
            distributed section).

        See Also
        --------
        petsc.PetscSFCreateSectionSF

        """
        cdef SF sectionSF = SF()
        cdef PetscInt noffsets = 0
        cdef PetscInt *cremoteOffsets = NULL
        if remoteOffsets is not None:
            remoteOffsets = iarray_i(remoteOffsets, &noffsets, &cremoteOffsets)
        CHKERR(PetscSFCreateSectionSF(self.sf, rootSection.sec, cremoteOffsets,
                                      leafSection.sec, &sectionSF.sf))
        return sectionSF

    def distributeSection(self, Section rootSection, Section leafSection=None) -> tuple[ArrayInt, Section]:
        """Create a new, reorganized `Section`.

        Collective.

        Moves from the root to the leaves of the `SF`.

        Parameters
        ----------
        rootSection
            Section defined on root space.
        leafSection
            Section defined on the leaf space.

        See Also
        --------
        petsc.PetscSFDistributeSection

        """
        cdef PetscInt lpStart = 0
        cdef PetscInt lpEnd = 0
        cdef PetscInt *cremoteOffsets = NULL
        cdef ndarray remoteOffsets
        cdef MPI_Comm ccomm = def_Comm(self.comm, PETSC_COMM_DEFAULT)
        if leafSection is None:
            leafSection = Section()
        if leafSection.sec == NULL:
            CHKERR(PetscSectionCreate(ccomm, &leafSection.sec))
        CHKERR(PetscSFDistributeSection(self.sf, rootSection.sec,
                                        &cremoteOffsets, leafSection.sec))
        CHKERR(PetscSectionGetChart(leafSection.sec, &lpStart, &lpEnd))
        # Copy the PETSc-allocated offsets into a NumPy array, then free
        # the C buffer to avoid leaking it.
        remoteOffsets = array_i(lpEnd-lpStart, cremoteOffsets)
        CHKERR(PetscFree(cremoteOffsets))
        return (remoteOffsets, leafSection)

    def compose(self, SF sf) -> SF:
        """Compose a new `SF`.

        Collective.

        Puts the ``sf`` under this object in a top (roots) down (leaves) view.

        Parameters
        ----------
        sf
            `SF` to put under this object.

        See Also
        --------
        petsc.PetscSFCompose

        """
        cdef SF csf = SF()
        CHKERR(PetscSFCompose(self.sf, sf.sf, &csf.sf))
        return csf

    def bcastBegin(self, unit: Datatype, ndarray rootdata, ndarray leafdata, op: Op) -> None:
        """Begin pointwise broadcast.

        Collective.

        Root values are reduced to leaf values. This call has to be concluded
        with a call to `bcastEnd`.

        Parameters
        ----------
        unit
            MPI datatype.
        rootdata
            Buffer to broadcast.
        leafdata
            Buffer to be reduced with values from each leaf's respective root.
        op
            MPI reduction operation.

        See Also
        --------
        bcastEnd, petsc.PetscSFBcastBegin

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        cdef MPI_Op cop = mpi4py_Op_Get(op)
        CHKERR(PetscSFBcastBegin(self.sf, dtype, <const void*>PyArray_DATA(rootdata),
                                 <void*>PyArray_DATA(leafdata), cop))

    def bcastEnd(self, unit: Datatype, ndarray rootdata, ndarray leafdata, op: Op) -> None:
        """End a broadcast & reduce operation started with `bcastBegin`.

        Collective.

        Parameters
        ----------
        unit
            MPI datatype.
        rootdata
            Buffer to broadcast.
        leafdata
            Buffer to be reduced with values from each leaf's respective root.
        op
            MPI reduction operation.

        See Also
        --------
        bcastBegin, petsc.PetscSFBcastEnd

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        cdef MPI_Op cop = mpi4py_Op_Get(op)
        CHKERR(PetscSFBcastEnd(self.sf, dtype, <const void*>PyArray_DATA(rootdata),
                               <void*>PyArray_DATA(leafdata), cop))

    def reduceBegin(self, unit: Datatype, ndarray leafdata, ndarray rootdata, op: Op) -> None:
        """Begin reduction of leafdata into rootdata.

        Collective.

        This call has to be completed with call to `reduceEnd`.

        Parameters
        ----------
        unit
            MPI datatype.
        leafdata
            Values to reduce.
        rootdata
            Result of reduction of values from all leaves of each root.
        op
            MPI reduction operation.

        See Also
        --------
        reduceEnd, petsc.PetscSFReduceBegin

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        cdef MPI_Op cop = mpi4py_Op_Get(op)
        CHKERR(PetscSFReduceBegin(self.sf, dtype, <const void*>PyArray_DATA(leafdata),
                                  <void*>PyArray_DATA(rootdata), cop))

    def reduceEnd(self, unit: Datatype, ndarray leafdata, ndarray rootdata, op: Op) -> None:
        """End a reduction operation started with `reduceBegin`.

        Collective.

        Parameters
        ----------
        unit
            MPI datatype.
        leafdata
            Values to reduce.
        rootdata
            Result of reduction of values from all leaves of each root.
        op
            MPI reduction operation.

        See Also
        --------
        reduceBegin, petsc.PetscSFReduceEnd

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        cdef MPI_Op cop = mpi4py_Op_Get(op)
        CHKERR(PetscSFReduceEnd(self.sf, dtype, <const void*>PyArray_DATA(leafdata),
                                <void*>PyArray_DATA(rootdata), cop))

    def scatterBegin(self, unit: Datatype, ndarray multirootdata, ndarray leafdata) -> None:
        """Begin pointwise scatter operation.

        Collective.

        Operation is from multi-roots to leaves.
        This call has to be completed with `scatterEnd`.

        Parameters
        ----------
        unit
            MPI datatype.
        multirootdata
            Root buffer to send to each leaf, one unit of data per leaf.
        leafdata
            Leaf data to be updated with personal data from each respective root.

        See Also
        --------
        scatterEnd, petsc.PetscSFScatterBegin

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        CHKERR(PetscSFScatterBegin(self.sf, dtype, <const void*>PyArray_DATA(multirootdata),
                                   <void*>PyArray_DATA(leafdata)))

    def scatterEnd(self, unit: Datatype, ndarray multirootdata, ndarray leafdata) -> None:
        """End scatter operation that was started with `scatterBegin`.

        Collective.

        Parameters
        ----------
        unit
            MPI datatype.
        multirootdata
            Root buffer to send to each leaf, one unit of data per leaf.
        leafdata
            Leaf data to be updated with personal data from each respective root.

        See Also
        --------
        scatterBegin, petsc.PetscSFScatterEnd

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        CHKERR(PetscSFScatterEnd(self.sf, dtype, <const void*>PyArray_DATA(multirootdata),
                                 <void*>PyArray_DATA(leafdata)))

    def gatherBegin(self, unit: Datatype, ndarray leafdata, ndarray multirootdata) -> None:
        """Begin pointwise gather of all leaves into multi-roots.

        Collective.

        This call has to be completed with `gatherEnd`.

        Parameters
        ----------
        unit
            MPI datatype.
        leafdata
            Leaf data to gather to roots.
        multirootdata
            Root buffer to gather into, amount of space per root is
            equal to its degree.

        See Also
        --------
        gatherEnd, petsc.PetscSFGatherBegin

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        CHKERR(PetscSFGatherBegin(self.sf, dtype, <const void*>PyArray_DATA(leafdata),
                                  <void*>PyArray_DATA(multirootdata)))

    def gatherEnd(self, unit: Datatype, ndarray leafdata, ndarray multirootdata) -> None:
        """End gather operation that was started with `gatherBegin`.

        Collective.

        Parameters
        ----------
        unit
            MPI datatype.
        leafdata
            Leaf data to gather to roots.
        multirootdata
            Root buffer to gather into, amount of space per root is
            equal to its degree.

        See Also
        --------
        gatherBegin, petsc.PetscSFGatherEnd

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        CHKERR(PetscSFGatherEnd(self.sf, dtype, <const void*>PyArray_DATA(leafdata),
                                <void*>PyArray_DATA(multirootdata)))

    def fetchAndOpBegin(self, unit: Datatype, rootdata: ndarray, leafdata: ndarray, leafupdate: ndarray, op: Op) -> None:
        """Begin fetch and update operation.

        Collective.

        This operation fetches values from root and updates atomically
        by applying an operation using the leaf value.

        This call has to be completed with `fetchAndOpEnd`.

        Parameters
        ----------
        unit
            MPI datatype.
        rootdata
            Root values to be updated, input state is seen by first process
            to perform an update.
        leafdata
            Leaf values to use in reduction.
        leafupdate
            State at each leaf's respective root immediately prior to my atomic
            update.
        op
            MPI reduction operation.

        See Also
        --------
        fetchAndOpEnd, petsc.PetscSFFetchAndOpBegin

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        cdef MPI_Op cop = mpi4py_Op_Get(op)
        CHKERR(PetscSFFetchAndOpBegin(self.sf, dtype, <void*>PyArray_DATA(rootdata),
                                      <const void*>PyArray_DATA(leafdata),
                                      <void*>PyArray_DATA(leafupdate), cop))

    def fetchAndOpEnd(self, unit: Datatype, rootdata: ndarray, leafdata: ndarray, leafupdate: ndarray, op: Op) -> None:
        """End operation started in a matching call to `fetchAndOpBegin`.

        Collective.

        Parameters
        ----------
        unit
            MPI datatype.
        rootdata
            Root values to be updated, input state is seen by first process
            to perform an update.
        leafdata
            Leaf values to use in reduction.
        leafupdate
            State at each leaf's respective root immediately prior to my atomic
            update.
        op
            MPI reduction operation.

        See Also
        --------
        fetchAndOpBegin, petsc.PetscSFFetchAndOpEnd

        """
        cdef MPI_Datatype dtype = mpi4py_Datatype_Get(unit)
        cdef MPI_Op cop = mpi4py_Op_Get(op)
        CHKERR(PetscSFFetchAndOpEnd(self.sf, dtype, <void*>PyArray_DATA(rootdata),
                                    <const void*>PyArray_DATA(leafdata),
                                    <void*>PyArray_DATA(leafupdate), cop))

# --------------------------------------------------------------------

# Keep the enum-like namespace reachable only as `SF.Type`.
del SFType

# --------------------------------------------------------------------
