xref: /petsc/src/binding/petsc4py/src/petsc4py/PETSc/Vec.pyx (revision bd70dab1bb46ff03779dabed93ecafb519950087) !
1# --------------------------------------------------------------------
2
class VecType(object):
    """The vector type."""
    # Host-memory vector implementations.
    SEQ        = S_(VECSEQ)
    MPI        = S_(VECMPI)
    STANDARD   = S_(VECSTANDARD)
    SHARED     = S_(VECSHARED)
    # ViennaCL-backed vectors.
    SEQVIENNACL= S_(VECSEQVIENNACL)
    MPIVIENNACL= S_(VECMPIVIENNACL)
    VIENNACL   = S_(VECVIENNACL)
    # CUDA-backed vectors.
    SEQCUDA    = S_(VECSEQCUDA)
    MPICUDA    = S_(VECMPICUDA)
    CUDA       = S_(VECCUDA)
    # HIP-backed vectors.
    SEQHIP     = S_(VECSEQHIP)
    MPIHIP     = S_(VECMPIHIP)
    HIP        = S_(VECHIP)
    # Nested vector composed of subvectors.
    NEST       = S_(VECNEST)
    # Kokkos-backed vectors.
    SEQKOKKOS  = S_(VECSEQKOKKOS)
    MPIKOKKOS  = S_(VECMPIKOKKOS)
    KOKKOS     = S_(VECKOKKOS)
22
23
class VecOption(object):
    """Vector assembly option."""
    # Ignore entries destined for other MPI processes during assembly.
    IGNORE_OFF_PROC_ENTRIES = VEC_IGNORE_OFF_PROC_ENTRIES
    # Silently skip entries addressed with negative indices.
    IGNORE_NEGATIVE_INDICES = VEC_IGNORE_NEGATIVE_INDICES
28
29# --------------------------------------------------------------------
30
31
32cdef class Vec(Object):
33    """A vector object.
34
35    See Also
36    --------
37    petsc.Vec
38
39    """
40
41    Type = VecType
42    Option = VecOption
43
44    #
45
    def __cinit__(self):
        # Alias the generic PETSc object handle to the C-level vector
        # handle so the Object base class manages the same pointer.
        self.obj = <PetscObject*> &self.vec
        self.vec = NULL
49
    # unary operations

    def __pos__(self):
        """Return ``+self`` (delegates to the module-level ``vec_pos``)."""
        return vec_pos(self)

    def __neg__(self):
        """Return ``-self`` (delegates to ``vec_neg``)."""
        return vec_neg(self)

    def __abs__(self):
        """Return ``abs(self)`` (delegates to ``vec_abs``)."""
        return vec_abs(self)
60
    # inplace binary operations

    def __iadd__(self, other):
        """In-place addition ``self += other`` (delegates to ``vec_iadd``)."""
        return vec_iadd(self, other)

    def __isub__(self, other):
        """In-place subtraction ``self -= other`` (delegates to ``vec_isub``)."""
        return vec_isub(self, other)

    def __imul__(self, other):
        """In-place multiplication ``self *= other`` (delegates to ``vec_imul``)."""
        return vec_imul(self, other)

    # __idiv__ is the Python 2 hook; it shares the same helper as
    # __itruediv__ below.
    def __idiv__(self, other):
        return vec_idiv(self, other)

    def __itruediv__(self, other):
        """In-place true division ``self /= other`` (delegates to ``vec_idiv``)."""
        return vec_idiv(self, other)
77
    # binary operations

    def __add__(self, other):
        """Return ``self + other`` (delegates to ``vec_add``)."""
        return vec_add(self, other)

    def __radd__(self, other):
        """Return ``other + self`` (reflected addition)."""
        return vec_radd(self, other)

    def __sub__(self, other):
        """Return ``self - other`` (delegates to ``vec_sub``)."""
        return vec_sub(self, other)

    def __rsub__(self, other):
        """Return ``other - self`` (reflected subtraction)."""
        return vec_rsub(self, other)

    def __mul__(self, other):
        """Return ``self * other`` (delegates to ``vec_mul``)."""
        return vec_mul(self, other)

    def __rmul__(self, other):
        """Return ``other * self`` (reflected multiplication)."""
        return vec_rmul(self, other)

    # __div__/__rdiv__ are the Python 2 division hooks; they share the
    # same helpers as the true-division hooks below.
    def __div__(self, other):
        return vec_div(self, other)

    def __rdiv__(self, other):
        return vec_rdiv(self, other)

    def __truediv__(self, other):
        """Return ``self / other`` (delegates to ``vec_div``)."""
        return vec_div(self, other)

    def __rtruediv__(self, other):
        """Return ``other / self`` (reflected true division)."""
        return vec_rdiv(self, other)

    def __matmul__(self, other):
        """Return ``self @ other`` (delegates to ``vec_matmul``)."""
        return vec_matmul(self, other)

    def __getitem__(self, i):
        """Return vector entries selected by ``i`` (delegates to ``vec_getitem``)."""
        return vec_getitem(self, i)

    def __setitem__(self, i, v):
        """Set vector entries selected by ``i`` to ``v`` (delegates to ``vec_setitem``)."""
        vec_setitem(self, i, v)
118
    # buffer interface (PEP 3118)

    def __getbuffer__(self, Py_buffer *view, int flags):
        """Export the vector data through a fresh ``_Vec_buffer`` helper."""
        cdef _Vec_buffer buf = _Vec_buffer(self)
        buf.acquirebuffer(view, flags)

    def __releasebuffer__(self, Py_buffer *view):
        # view.obj holds the _Vec_buffer created in __getbuffer__;
        # recover it to release the matching acquisition.
        cdef _Vec_buffer buf = <_Vec_buffer>(view.obj)
        buf.releasebuffer(view)
        <void>self # unused
129
    # 'with' statement (PEP 343)

    def __enter__(self):
        # Stash the buffer helper on the object so __exit__ can find
        # the exact buffer to release, even across nesting.
        cdef _Vec_buffer buf = _Vec_buffer(self)
        self.set_attr('__buffer__', buf)
        return buf.enter()

    def __exit__(self, *exc):
        cdef _Vec_buffer buf = self.get_attr('__buffer__')
        self.set_attr('__buffer__', None)
        return buf.exit()
141
142    #
143
144    def view(self, Viewer viewer=None) -> None:
145        """Display the vector.
146
147        Collective.
148
149        Parameters
150        ----------
151        viewer
152            A `Viewer` instance or `None` for the default viewer.
153
154        See Also
155        --------
156        load, petsc.VecView
157
158        """
159        cdef PetscViewer vwr = NULL
160        if viewer is not None: vwr = viewer.vwr
161        CHKERR(VecView(self.vec, vwr))
162
    def destroy(self) -> Self:
        """Destroy the vector.

        Collective.

        See Also
        --------
        create, petsc.VecDestroy

        """
        # Pass the address of the handle so PETSc can free the vector
        # and update the stored pointer.
        CHKERR(VecDestroy(&self.vec))
        return self
175
176    def create(self, comm: Comm | None = None) -> Self:
177        """Create a vector object.
178
179        Collective.
180
181        After creation the vector type can then be set with `setType`.
182
183        Parameters
184        ----------
185        comm
186            MPI communicator, defaults to `Sys.getDefaultComm`.
187
188        See Also
189        --------
190        destroy, petsc.VecCreate
191
192        """
193        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
194        cdef PetscVec newvec = NULL
195        CHKERR(VecCreate(ccomm, &newvec))
196        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
197        return self
198
199    def setType(self, vec_type: Type | str) -> None:
200        """Set the vector type.
201
202        Collective.
203
204        Parameters
205        ----------
206        vec_type
207            The vector type.
208
209        See Also
210        --------
211        create, getType, petsc.VecSetType
212
213        """
214        cdef PetscVecType cval = NULL
215        vec_type = str2bytes(vec_type, &cval)
216        CHKERR(VecSetType(self.vec, cval))
217
218    def setSizes(
219        self,
220        size: LayoutSizeSpec,
221        bsize: int | None = None) -> None:
222        """Set the local and global sizes of the vector.
223
224        Collective.
225
226        Parameters
227        ----------
228        size
229            Vector size.
230        bsize
231            Vector block size. If `None`, ``bsize = 1``.
232
233        See Also
234        --------
235        getSizes, petsc.VecSetSizes
236
237        """
238        cdef PetscInt bs=0, n=0, N=0
239        Vec_Sizes(size, bsize, &bs, &n, &N)
240        CHKERR(VecSetSizes(self.vec, n, N))
241        if bs != PETSC_DECIDE:
242            CHKERR(VecSetBlockSize(self.vec, bs))
243
244    #
245
246    # FIXME the comm argument is hideous.
247    def createSeq(
248        self,
249        size: LayoutSizeSpec,
250        bsize: int | None = None,
251        comm: Comm | None = None) -> Self:
252        """Create a sequential `Type.SEQ` vector.
253
254        Collective.
255
256        Parameters
257        ----------
258        size
259            Vector size.
260        bsize
261            Vector block size. If `None`, ``bsize = 1``.
262        comm
263            MPI communicator, defaults to `COMM_SELF`.
264
265        See Also
266        --------
267        createMPI, petsc.VecCreateSeq
268
269        """
270        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_SELF)
271        cdef PetscInt bs=0, n=0, N=0
272        Vec_Sizes(size, bsize, &bs, &n, &N)
273        Sys_Layout(ccomm, bs, &n, &N)
274        if bs == PETSC_DECIDE: bs = 1
275        cdef PetscVec newvec = NULL
276        CHKERR(VecCreate(ccomm, &newvec))
277        CHKERR(VecSetSizes(newvec, n, N))
278        CHKERR(VecSetBlockSize(newvec, bs))
279        CHKERR(VecSetType(newvec, VECSEQ))
280        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
281        return self
282
283    def createMPI(
284        self,
285        size: LayoutSizeSpec,
286        bsize: int | None = None,
287        comm: Comm | None = None) -> Self:
288        """Create a parallel `Type.MPI` vector.
289
290        Collective.
291
292        Parameters
293        ----------
294        size
295            Vector size.
296        bsize
297            Vector block size. If `None`, ``bsize = 1``.
298        comm
299            MPI communicator, defaults to `Sys.getDefaultComm`.
300
301        See Also
302        --------
303        createSeq, petsc.VecCreateMPI
304
305        """
306        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
307        cdef PetscInt bs=0, n=0, N=0
308        Vec_Sizes(size, bsize, &bs, &n, &N)
309        Sys_Layout(ccomm, bs, &n, &N)
310        if bs == PETSC_DECIDE: bs = 1
311        cdef PetscVec newvec = NULL
312        CHKERR(VecCreate(ccomm, &newvec))
313        CHKERR(VecSetSizes(newvec, n, N))
314        CHKERR(VecSetBlockSize(newvec, bs))
315        CHKERR(VecSetType(newvec, VECMPI))
316        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
317        return self
318
    def createWithArray(
        self,
        array: Sequence[Scalar],
        size: LayoutSizeSpec | None = None,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a vector using a provided array.

        Collective.

        This method will create either a `Type.SEQ` or `Type.MPI`
        depending on the size of the communicator.

        Parameters
        ----------
        array
            Array to store the vector values. Must be at least as large as
            the local size of the vector.
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc.VecCreateSeqWithArray, petsc.VecCreateMPIWithArray

        """
        cdef PetscInt na=0
        cdef PetscScalar *sa=NULL
        # iarray_s exposes the array's length (na) and data pointer (sa);
        # the returned object is kept referenced below.
        array = iarray_s(array, &na, &sa)
        # Default the size to the provided array's length.
        if size is None: size = (toInt(na), toInt(PETSC_DECIDE))
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt bs=0, n=0, N=0
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        if bs == PETSC_DECIDE: bs = 1
        # The array must cover at least the local portion of the vector.
        if na < n:  raise ValueError(
            "array size %d and vector local size %d block size %d" %
            (toInt(na), toInt(n), toInt(bs)))
        cdef PetscVec newvec = NULL
        if comm_size(ccomm) == 1:
            CHKERR(VecCreateSeqWithArray(ccomm, bs, N, sa, &newvec))
        else:
            CHKERR(VecCreateMPIWithArray(ccomm, bs, n, N, sa, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
        # Keep a reference so the backing memory outlives the vector.
        self.set_attr('__array__', array)
        return self
369
    def createCUDAWithArrays(
        self,
        cpuarray: Sequence[Scalar] | None = None,
        cudahandle: Any | None = None,  # FIXME What type is appropriate here?
        size: LayoutSizeSpec | None = None,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a `Type.CUDA` vector with optional arrays.

        Collective.

        Parameters
        ----------
        cpuarray
            Host array. Will be lazily allocated if not provided.
        cudahandle
            Address of the array on the GPU. Will be lazily allocated if
            not provided.
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc.VecCreateSeqCUDAWithArrays, petsc.VecCreateMPICUDAWithArrays

        """
        cdef PetscInt na=0
        cdef PetscScalar *sa=NULL
        cdef PetscScalar *gpuarray = NULL
        # NOTE(review): truthiness means a zero/NULL handle is treated
        # as "not provided" — confirm this is the intended contract.
        if cudahandle:
            gpuarray = <PetscScalar*>(<Py_uintptr_t>cudahandle)
        if cpuarray is not None:
            cpuarray = iarray_s(cpuarray, &na, &sa)

        # na stays 0 when no host array was passed, so the derived
        # default size is (0, PETSC_DECIDE) in that case.
        if size is None: size = (toInt(na), toInt(PETSC_DECIDE))
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt bs=0, n=0, N=0
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        if bs == PETSC_DECIDE: bs = 1
        if na < n:  raise ValueError(
            "array size %d and vector local size %d block size %d" %
            (toInt(na), toInt(n), toInt(bs)))
        cdef PetscVec newvec = NULL
        if comm_size(ccomm) == 1:
            CHKERR(VecCreateSeqCUDAWithArrays(ccomm, bs, N, sa, gpuarray, &newvec))
        else:
            CHKERR(VecCreateMPICUDAWithArrays(ccomm, bs, n, N, sa, gpuarray, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec

        # Keep the host array referenced so its memory stays valid.
        if cpuarray is not None:
            self.set_attr('__array__', cpuarray)
        return self
427
    def createHIPWithArrays(
        self,
        cpuarray: Sequence[Scalar] | None = None,
        hiphandle: Any | None = None,  # FIXME What type is appropriate here?
        size: LayoutSizeSpec | None = None,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a `Type.HIP` vector with optional arrays.

        Collective.

        Parameters
        ----------
        cpuarray
            Host array. Will be lazily allocated if not provided.
        hiphandle
            Address of the array on the GPU. Will be lazily allocated if
            not provided.
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc.VecCreateSeqHIPWithArrays, petsc.VecCreateMPIHIPWithArrays

        """
        cdef PetscInt na=0
        cdef PetscScalar *sa=NULL
        cdef PetscScalar *gpuarray = NULL
        # NOTE(review): truthiness means a zero/NULL handle is treated
        # as "not provided" — mirrors createCUDAWithArrays.
        if hiphandle:
            gpuarray = <PetscScalar*>(<Py_uintptr_t>hiphandle)
        if cpuarray is not None:
            cpuarray = iarray_s(cpuarray, &na, &sa)

        # na stays 0 when no host array was passed.
        if size is None: size = (toInt(na), toInt(PETSC_DECIDE))
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt bs=0, n=0, N=0
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        if bs == PETSC_DECIDE: bs = 1
        if na < n:  raise ValueError(
            "array size %d and vector local size %d block size %d" %
            (toInt(na), toInt(n), toInt(bs)))
        cdef PetscVec newvec = NULL
        if comm_size(ccomm) == 1:
            CHKERR(VecCreateSeqHIPWithArrays(ccomm, bs, N, sa, gpuarray, &newvec))
        else:
            CHKERR(VecCreateMPIHIPWithArrays(ccomm, bs, n, N, sa, gpuarray, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec

        # Keep the host array referenced so its memory stays valid.
        if cpuarray is not None:
            self.set_attr('__array__', cpuarray)
        return self
485
    def createViennaCLWithArrays(
        self,
        cpuarray: Sequence[Scalar] | None = None,
        viennaclvechandle: Any | None = None,  # FIXME What type is appropriate here?
        size: LayoutSizeSpec | None = None,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a `Type.VIENNACL` vector with optional arrays.

        Collective.

        Parameters
        ----------
        cpuarray
            Host array. Will be lazily allocated if not provided.
        viennaclvechandle
            Address of the array on the GPU. Will be lazily allocated if
            not provided.
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc.VecCreateSeqViennaCLWithArrays
        petsc.VecCreateMPIViennaCLWithArrays

        """
        cdef PetscInt na=0
        cdef PetscScalar *sa=NULL
        cdef PetscScalar *vclvec = NULL
        # NOTE(review): truthiness means a zero/NULL handle is treated
        # as "not provided" — mirrors the CUDA/HIP variants.
        if viennaclvechandle:
            vclvec = <PetscScalar*>(<Py_uintptr_t>viennaclvechandle)
        if cpuarray is not None:
            cpuarray = iarray_s(cpuarray, &na, &sa)

        # na stays 0 when no host array was passed.
        if size is None: size = (toInt(na), toInt(PETSC_DECIDE))
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt bs=0, n=0, N=0
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        if bs == PETSC_DECIDE: bs = 1
        if na < n:
            raise ValueError("array size %d and vector local size %d block size %d" % (toInt(na), toInt(n), toInt(bs)))
        cdef PetscVec newvec = NULL
        if comm_size(ccomm) == 1:
            CHKERR(VecCreateSeqViennaCLWithArrays(ccomm, bs, N, sa, vclvec, &newvec))
        else:
            CHKERR(VecCreateMPIViennaCLWithArrays(ccomm, bs, n, N, sa, vclvec, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec

        # Keep the host array referenced so its memory stays valid.
        if cpuarray is not None:
            self.set_attr('__array__', cpuarray)
        return self
543
544    # FIXME: object? Do we need to specify it? Can't we just use Any?
    def createWithDLPack(
        self,
        object dltensor,
        size: LayoutSizeSpec | None = None,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a vector wrapping a DLPack object, sharing the same memory.

        Collective.

        This operation does not modify the storage of the original tensor and
        should be used with contiguous tensors only. If the tensor is stored in
        row-major order (e.g. PyTorch tensors), the resulting vector will look
        like an unrolled tensor using row-major order.

        The resulting vector type will be one of `Type.SEQ`, `Type.MPI`,
        `Type.SEQCUDA`, `Type.MPICUDA`, `Type.SEQHIP` or
        `Type.MPIHIP` depending on the type of ``dltensor`` and the number
        of processes in the communicator.

        Parameters
        ----------
        dltensor
            Either an object with a ``__dlpack__`` method or a DLPack tensor object.
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        """
        cdef DLManagedTensor* ptr = NULL
        cdef int bits = 0
        cdef PetscInt nz = 1
        cdef int64_t ndim = 0
        cdef int64_t* shape = NULL
        cdef int64_t* strides = NULL
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt bs = 0, n = 0, N = 0

        # Accept either a producer object or an already-made capsule.
        if not PyCapsule_CheckExact(dltensor):
            dltensor = dltensor.__dlpack__()

        if PyCapsule_IsValid(dltensor, 'dltensor'):
            ptr = <DLManagedTensor*>PyCapsule_GetPointer(dltensor, 'dltensor')
            bits = ptr.dl_tensor.dtype.bits
            if bits != 8*sizeof(PetscScalar):
                raise TypeError("Tensor dtype = {} does not match PETSc precision".format(ptr.dl_tensor.dtype))
            ndim = ptr.dl_tensor.ndim
            shape = ptr.dl_tensor.shape
            # nz = product of all dimensions = total number of entries.
            for s in shape[:ndim]:
                nz = nz*s
            strides = ptr.dl_tensor.strides
            # Rename the capsule to mark it as consumed, per the DLPack
            # protocol's single-consumption rule.
            PyCapsule_SetName(dltensor, 'used_dltensor')
        else:
            raise ValueError("Expect a dltensor field, pycapsule.PyCapsule can only be consumed once")
        if size is None: size = (toInt(nz), toInt(PETSC_DECIDE))
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        if bs == PETSC_DECIDE: bs = 1
        if nz < n:  raise ValueError(
            "array size %d and vector local size %d block size %d" %
            (toInt(nz), toInt(n), toInt(bs)))
        cdef PetscVec newvec = NULL
        # Dispatch on the DLPack device type to pick the vector backend.
        cdef PetscDLDeviceType dltype = ptr.dl_tensor.ctx.device_type
        if dltype in [kDLCUDA, kDLCUDAManaged]:
            if comm_size(ccomm) == 1:
                CHKERR(VecCreateSeqCUDAWithArray(ccomm, bs, N, <PetscScalar*>(ptr.dl_tensor.data), &newvec))
            else:
                CHKERR(VecCreateMPICUDAWithArray(ccomm, bs, n, N, <PetscScalar*>(ptr.dl_tensor.data), &newvec))
        elif dltype in [kDLCPU, kDLCUDAHost, kDLROCMHost]:
            if comm_size(ccomm) == 1:
                CHKERR(VecCreateSeqWithArray(ccomm, bs, N, <PetscScalar*>(ptr.dl_tensor.data), &newvec))
            else:
                CHKERR(VecCreateMPIWithArray(ccomm, bs, n, N, <PetscScalar*>(ptr.dl_tensor.data), &newvec))
        elif dltype == kDLROCM:
            if comm_size(ccomm) == 1:
                CHKERR(VecCreateSeqHIPWithArray(ccomm, bs, N, <PetscScalar*>(ptr.dl_tensor.data), &newvec))
            else:
                CHKERR(VecCreateMPIHIPWithArray(ccomm, bs, n, N, <PetscScalar*>(ptr.dl_tensor.data), &newvec))
        else:
            raise TypeError("Device type {} not supported".format(dltype))

        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
        # Keep the (consumed) capsule referenced so the memory stays valid.
        self.set_attr('__array__', dltensor)
        # Copy shape/strides into Python-owned buffers before the manager
        # deleter below may free the tensor's own arrays.
        cdef int64_t* shape_arr = NULL
        cdef int64_t* strides_arr = NULL
        cdef object s1 = oarray_p(empty_p(<PetscInt>ndim), NULL, <void**>&shape_arr)
        cdef object s2 = oarray_p(empty_p(<PetscInt>ndim), NULL, <void**>&strides_arr)
        for i in range(ndim):
            shape_arr[i] = shape[i]
            strides_arr[i] = strides[i]
        self.set_attr('__dltensor_ctx__', (ptr.dl_tensor.ctx.device_type, ptr.dl_tensor.ctx.device_id, ndim, s1, s2))
        if ptr.manager_deleter != NULL:
            ptr.manager_deleter(ptr) # free the manager
        return self
642
    def attachDLPackInfo(
        self,
        Vec vec=None,
        object dltensor=None) -> Self:
        """Attach tensor information from another vector or DLPack tensor.

        Logically collective.

        This tensor information is required when converting a `Vec` to a
        DLPack object.

        Parameters
        ----------
        vec
            Vector with attached tensor information. This is typically created
            by calling `createWithDLPack`.
        dltensor
            DLPack tensor. This will only be used if ``vec`` is `None`.

        Notes
        -----
        This operation does not copy any data from ``vec`` or ``dltensor``.

        See Also
        --------
        clearDLPackInfo, createWithDLPack

        """
        cdef object ctx = None
        cdef DLManagedTensor* ptr = NULL
        cdef int64_t* shape_arr = NULL
        cdef int64_t* strides_arr = NULL
        cdef object s1 = None, s2 = None

        if vec is None and dltensor is None:
            raise ValueError('Missing input parameters')
        if vec is not None:
            # Borrow the context tuple stored by createWithDLPack.
            ctx = (<Object>vec).get_attr('__dltensor_ctx__')
            if ctx is None:
                raise ValueError('Input vector has no tensor information')
            self.set_attr('__dltensor_ctx__', ctx)
        else:
            # Accept both fresh and already-consumed capsules, since no
            # data is taken over here — only the metadata is read.
            if PyCapsule_IsValid(dltensor, 'dltensor'):
                ptr = <DLManagedTensor*>PyCapsule_GetPointer(dltensor, 'dltensor')
            elif PyCapsule_IsValid(dltensor, 'used_dltensor'):
                ptr = <DLManagedTensor*>PyCapsule_GetPointer(dltensor, 'used_dltensor')
            else:
                raise ValueError("Expect a dltensor or used_dltensor field")
            bits = ptr.dl_tensor.dtype.bits
            if bits != 8*sizeof(PetscScalar):
                raise TypeError("Tensor dtype = {} does not match PETSc precision".format(ptr.dl_tensor.dtype))
            ndim = ptr.dl_tensor.ndim
            shape = ptr.dl_tensor.shape
            strides = ptr.dl_tensor.strides
            # Copy shape/strides into Python-owned buffers.
            s1 = oarray_p(empty_p(ndim), NULL, <void**>&shape_arr)
            s2 = oarray_p(empty_p(ndim), NULL, <void**>&strides_arr)
            for i in range(ndim):
                shape_arr[i] = shape[i]
                strides_arr[i] = strides[i]
            self.set_attr('__dltensor_ctx__', (ptr.dl_tensor.ctx.device_type, ptr.dl_tensor.ctx.device_id, ndim, s1, s2))
        return self
704
    def clearDLPackInfo(self) -> Self:
        """Clear tensor information.

        Logically collective.

        See Also
        --------
        attachDLPackInfo, createWithDLPack

        """
        # Drop the context tuple stored by attachDLPackInfo/createWithDLPack.
        self.set_attr('__dltensor_ctx__', None)
        return self
717
    # TODO Stream
    def __dlpack__(self, stream=-1):
        """DLPack producer hook; ``stream`` is currently ignored (see TODO)."""
        return self.toDLPack('rw')
721
    def __dlpack_device__(self):
        """Return the DLPack ``(device_type, device_id)`` pair for this vector."""
        (dltype, devId, _, _, _) = vec_get_dlpack_ctx(self)
        return (dltype, devId)
725
    def toDLPack(self, mode: AccessModeSpec = 'rw') -> Any:
        """Return a DLPack `PyCapsule` wrapping the vector data.

        Collective.

        Parameters
        ----------
        mode
            Access mode for the vector.

        Returns
        -------
        `PyCapsule`
            Capsule of a DLPack tensor wrapping a `Vec`.

        Notes
        -----
        It is important that the access mode is respected by the consumer
        as this is not enforced internally.

        See Also
        --------
        createWithDLPack

        """
        if mode is None: mode = 'rw'
        if mode not in ['rw', 'r', 'w']:
            raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'")

        cdef int64_t ndim = 0
        # Tensor metadata must have been attached beforehand
        # (createWithDLPack / attachDLPackInfo).
        (device_type, device_id, ndim, shape, strides) = vec_get_dlpack_ctx(self)
        hostmem = (device_type == kDLCPU)

        cdef DLManagedTensor* dlm_tensor = <DLManagedTensor*>malloc(sizeof(DLManagedTensor))
        cdef DLTensor* dl_tensor = &dlm_tensor.dl_tensor
        cdef PetscScalar *a = NULL
        cdef int64_t* shape_strides = NULL
        dl_tensor.byte_offset = 0

        # DLPack does not currently play well with our get/restore model
        # Call restore right-away and hope that the consumer will do the right thing
        # and not modify memory requested with read access
        # By restoring now, we guarantee the sanity of the ObjectState
        if mode == 'w':
            if hostmem:
                CHKERR(VecGetArrayWrite(self.vec, <PetscScalar**>&a))
                CHKERR(VecRestoreArrayWrite(self.vec, NULL))
            else:
                CHKERR(VecGetArrayWriteAndMemType(self.vec, <PetscScalar**>&a, NULL))
                CHKERR(VecRestoreArrayWriteAndMemType(self.vec, NULL))
        elif mode == 'r':
            if hostmem:
                CHKERR(VecGetArrayRead(self.vec, <const PetscScalar**>&a))
                CHKERR(VecRestoreArrayRead(self.vec, NULL))
            else:
                CHKERR(VecGetArrayReadAndMemType(self.vec, <const PetscScalar**>&a, NULL))
                CHKERR(VecRestoreArrayReadAndMemType(self.vec, NULL))
        else:
            if hostmem:
                CHKERR(VecGetArray(self.vec, <PetscScalar**>&a))
                CHKERR(VecRestoreArray(self.vec, NULL))
            else:
                CHKERR(VecGetArrayAndMemType(self.vec, <PetscScalar**>&a, NULL))
                CHKERR(VecRestoreArrayAndMemType(self.vec, NULL))
        dl_tensor.data = <void *>a

        cdef DLContext* ctx = &dl_tensor.ctx
        ctx.device_type = device_type
        ctx.device_id = device_id
        # Shape and strides share one malloc'ed buffer: the first ndim
        # entries hold the shape, the next ndim entries the strides.
        shape_strides = <int64_t*>malloc(sizeof(int64_t)*2*ndim)
        for i in range(ndim):
            shape_strides[i] = shape[i]
        for i in range(ndim):
            shape_strides[i+ndim] = strides[i]
        dl_tensor.ndim = <int>ndim
        dl_tensor.shape = shape_strides
        dl_tensor.strides = shape_strides + ndim

        cdef DLDataType* dtype = &dl_tensor.dtype
        dtype.code = <uint8_t>DLDataTypeCode.kDLFloat
        if sizeof(PetscScalar) == 8:
            dtype.bits = <uint8_t>64
        elif sizeof(PetscScalar) == 4:
            dtype.bits = <uint8_t>32
        else:
            raise ValueError('Unsupported PetscScalar type')
        dtype.lanes = <uint16_t>1
        # Take a reference on the vector; the manager deleter releases it
        # when the consumer disposes of the tensor.
        dlm_tensor.manager_ctx = <void *>self.vec
        CHKERR(PetscObjectReference(<PetscObject>self.vec))
        dlm_tensor.manager_deleter = manager_deleter
        dlm_tensor.del_obj = <dlpack_manager_del_obj>PetscDEALLOC
        return PyCapsule_New(dlm_tensor, 'dltensor', pycapsule_deleter)
818
    def createGhost(
        self,
        ghosts: Sequence[int],
        size: LayoutSizeSpec,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a parallel vector with ghost padding on each processor.

        Collective.

        Parameters
        ----------
        ghosts
            Global indices of ghost points.
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        createGhostWithArray, petsc.VecCreateGhost, petsc.VecCreateGhostBlock

        """
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt ng=0, *ig=NULL
        ghosts = iarray_i(ghosts, &ng, &ig)
        cdef PetscInt bs=0, n=0, N=0
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        cdef PetscVec newvec = NULL
        # Use the blocked variant only when a block size was requested.
        if bs == PETSC_DECIDE:
            CHKERR(VecCreateGhost(
                    ccomm, n, N, ng, ig, &newvec))
        else:
            CHKERR(VecCreateGhostBlock(
                    ccomm, bs, n, N, ng, ig, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
        return self
860
    def createGhostWithArray(
        self,
        ghosts: Sequence[int],
        array: Sequence[Scalar],
        size: LayoutSizeSpec | None = None,
        bsize: int | None = None,
        comm: Comm | None = None) -> Self:
        """Create a parallel vector with ghost padding and provided arrays.

        Collective.

        Parameters
        ----------
        ghosts
            Global indices of ghost points.
        array
            Array to store the vector values. Must be at least as large as
            the local size of the vector (including ghost points).
        size
            Vector size.
        bsize
            Vector block size. If `None`, ``bsize = 1``.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        createGhost, petsc.VecCreateGhostWithArray
        petsc.VecCreateGhostBlockWithArray

        """
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef PetscInt ng=0, *ig=NULL
        ghosts = iarray_i(ghosts, &ng, &ig)
        cdef PetscInt na=0
        cdef PetscScalar *sa=NULL
        array = oarray_s(array, &na, &sa)
        # b is the effective block size used for the capacity check.
        cdef PetscInt b = 1 if bsize is None else asInt(bsize)
        # Default the size to the array length minus the ghost padding.
        if size is None: size = (toInt(na-ng*b), toInt(PETSC_DECIDE))
        cdef PetscInt bs=0, n=0, N=0
        Vec_Sizes(size, bsize, &bs, &n, &N)
        Sys_Layout(ccomm, bs, &n, &N)
        # The array must hold the local entries plus the ghost entries.
        if na < (n+ng*b): raise ValueError(
            "ghosts size %d, array size %d, and "
            "vector local size %d block size %d" %
            (toInt(ng), toInt(na), toInt(n), toInt(b)))
        cdef PetscVec newvec = NULL
        # Use the blocked variant only when a block size was requested.
        if bs == PETSC_DECIDE:
            CHKERR(VecCreateGhostWithArray(
                    ccomm, n, N, ng, ig, sa, &newvec))
        else:
            CHKERR(VecCreateGhostBlockWithArray(
                    ccomm, bs, n, N, ng, ig, sa, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
        # Keep a reference so the backing memory outlives the vector.
        self.set_attr('__array__', array)
        return self
917
918    def createShared(
919        self,
920        size: LayoutSizeSpec,
921        bsize: int | None = None,
922        comm: Comm | None = None) -> Self:
923        """Create a `Type.SHARED` vector that uses shared memory.
924
925        Collective.
926
927        Parameters
928        ----------
929        size
930            Vector size.
931        bsize
932            Vector block size. If `None`, ``bsize = 1``.
933        comm
934            MPI communicator, defaults to `Sys.getDefaultComm`.
935
936        See Also
937        --------
938        petsc.VecCreateShared
939
940        """
941        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
942        cdef PetscInt bs=0, n=0, N=0
943        Vec_Sizes(size, bsize, &bs, &n, &N)
944        Sys_Layout(ccomm, bs, &n, &N)
945        cdef PetscVec newvec = NULL
946        CHKERR(VecCreateShared(ccomm, n, N, &newvec))
947        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
948        if bs != PETSC_DECIDE:
949            CHKERR(VecSetBlockSize(self.vec, bs))
950        return self
951
    def createNest(
        self,
        vecs: Sequence[Vec],
        isets: Sequence[IS] | None = None,
        comm: Comm | None = None) -> Self:
        """Create a `Type.NEST` vector containing multiple nested subvectors.

        Collective.

        Parameters
        ----------
        vecs
            Iterable of subvectors.
        isets
            Iterable of index sets for each nested subvector.
            Defaults to contiguous ordering.
        comm
            MPI communicator, defaults to `Sys.getDefaultComm`.

        See Also
        --------
        petsc.VecCreateNest

        """
        vecs = list(vecs)
        # An empty or None isets collapses to the default contiguous ordering.
        if isets:
            isets = list(isets)
            assert len(isets) == len(vecs)
        else:
            isets = None
        cdef MPI_Comm ccomm = def_Comm(comm, PETSC_COMM_DEFAULT)
        cdef Py_ssize_t i, m = len(vecs)
        cdef PetscInt n = <PetscInt>m
        cdef PetscVec *cvecs  = NULL
        cdef PetscIS  *cisets = NULL
        # The 'unused' objects own the temporary C pointer arrays; holding
        # them as locals keeps the memory alive until VecCreateNest returns.
        cdef object unused1, unused2
        unused1 = oarray_p(empty_p(n), NULL, <void**>&cvecs)
        for i from 0 <= i < m: cvecs[i] = (<Vec?>vecs[i]).vec
        if isets is not None:
            unused2 = oarray_p(empty_p(n), NULL, <void**>&cisets)
            for i from 0 <= i < m: cisets[i] = (<IS?>isets[i]).iset
        cdef PetscVec newvec = NULL
        CHKERR(VecCreateNest(ccomm, n, cisets, cvecs, &newvec))
        CHKERR(PetscCLEAR(self.obj)); self.vec = newvec
        return self
997
998    #
999
1000    def setOptionsPrefix(self, prefix: str | None) -> None:
1001        """Set the prefix used for searching for options in the database.
1002
1003        Logically collective.
1004
1005        See Also
1006        --------
1007        petsc_options, getOptionsPrefix, petsc.VecSetOptionsPrefix
1008
1009        """
1010        cdef const char *cval = NULL
1011        prefix = str2bytes(prefix, &cval)
1012        CHKERR(VecSetOptionsPrefix(self.vec, cval))
1013
1014    def getOptionsPrefix(self) -> str:
1015        """Return the prefix used for searching for options in the database.
1016
1017        Not collective.
1018
1019        See Also
1020        --------
1021        petsc_options, setOptionsPrefix, petsc.VecGetOptionsPrefix
1022
1023        """
1024        cdef const char *cval = NULL
1025        CHKERR(VecGetOptionsPrefix(self.vec, &cval))
1026        return bytes2str(cval)
1027
1028    def appendOptionsPrefix(self, prefix: str | None) -> None:
1029        """Append to the prefix used for searching for options in the database.
1030
1031        Logically collective.
1032
1033        See Also
1034        --------
1035        petsc_options, setOptionsPrefix, petsc.VecAppendOptionsPrefix
1036
1037        """
1038        cdef const char *cval = NULL
1039        prefix = str2bytes(prefix, &cval)
1040        CHKERR(VecAppendOptionsPrefix(self.vec, cval))
1041
1042    def setFromOptions(self) -> None:
1043        """Configure the vector from the options database.
1044
1045        Collective.
1046
1047        See Also
1048        --------
1049        petsc_options, petsc.VecSetFromOptions
1050
1051        """
1052        CHKERR(VecSetFromOptions(self.vec))
1053
1054    def setUp(self) -> Self:
1055        """Set up the internal data structures for using the vector.
1056
1057        Collective.
1058
1059        See Also
1060        --------
1061        create, destroy, petsc.VecSetUp
1062
1063        """
1064        CHKERR(VecSetUp(self.vec))
1065        return self
1066
1067    def setOption(self, option: Option, flag: bool) -> None:
1068        """Set option.
1069
1070        Collective.
1071
1072        See Also
1073        --------
1074        petsc.VecSetOption
1075
1076        """
1077        CHKERR(VecSetOption(self.vec, option, flag))
1078
1079    def getType(self) -> str:
1080        """Return the type of the vector.
1081
1082        Not collective.
1083
1084        See Also
1085        --------
1086        setType, petsc.VecGetType
1087
1088        """
1089        cdef PetscVecType cval = NULL
1090        CHKERR(VecGetType(self.vec, &cval))
1091        return bytes2str(cval)
1092
1093    def getSize(self) -> int:
1094        """Return the global size of the vector.
1095
1096        Not collective.
1097
1098        See Also
1099        --------
1100        setSizes, getLocalSize, petsc.VecGetSize
1101
1102        """
1103        cdef PetscInt N = 0
1104        CHKERR(VecGetSize(self.vec, &N))
1105        return toInt(N)
1106
1107    def getLocalSize(self) -> int:
1108        """Return the local size of the vector.
1109
1110        Not collective.
1111
1112        See Also
1113        --------
1114        setSizes, getSize, petsc.VecGetLocalSize
1115
1116        """
1117        cdef PetscInt n = 0
1118        CHKERR(VecGetLocalSize(self.vec, &n))
1119        return toInt(n)
1120
1121    def getSizes(self) -> LayoutSizeSpec:
1122        """Return the vector sizes.
1123
1124        Not collective.
1125
1126        See Also
1127        --------
1128        getSize, getLocalSize, petsc.VecGetLocalSize, petsc.VecGetSize
1129
1130        """
1131        cdef PetscInt n = 0, N = 0
1132        CHKERR(VecGetLocalSize(self.vec, &n))
1133        CHKERR(VecGetSize(self.vec, &N))
1134        return (toInt(n), toInt(N))
1135
1136    def setBlockSize(self, bsize: int) -> None:
1137        """Set the block size of the vector.
1138
1139        Logically collective.
1140
1141        See Also
1142        --------
1143        petsc.VecSetBlockSize
1144
1145        """
1146        cdef PetscInt bs = asInt(bsize)
1147        CHKERR(VecSetBlockSize(self.vec, bs))
1148
1149    def getBlockSize(self) -> int:
1150        """Return the block size of the vector.
1151
1152        Not collective.
1153
1154        See Also
1155        --------
1156        petsc.VecGetBlockSize
1157
1158        """
1159        cdef PetscInt bs=0
1160        CHKERR(VecGetBlockSize(self.vec, &bs))
1161        return toInt(bs)
1162
1163    def getOwnershipRange(self) -> tuple[int, int]:
1164        """Return the locally owned range of indices ``(start, end)``.
1165
1166        Not collective.
1167
1168        Returns
1169        -------
1170        start : int
1171            The first local element.
1172        end : int
1173            One more than the last local element.
1174
1175        See Also
1176        --------
1177        getOwnershipRanges, petsc.VecGetOwnershipRange
1178
1179        """
1180        cdef PetscInt low=0, high=0
1181        CHKERR(VecGetOwnershipRange(self.vec, &low, &high))
1182        return (toInt(low), toInt(high))
1183
    def getOwnershipRanges(self) -> ArrayInt:
        """Return the range of indices owned by each process.

        Not collective.

        The returned array is the result of exclusive scan of the local sizes.

        See Also
        --------
        getOwnershipRange, petsc.VecGetOwnershipRanges

        """
        # rng points to PETSc-owned memory; it is copied below, not kept.
        cdef const PetscInt *rng = NULL
        CHKERR(VecGetOwnershipRanges(self.vec, &rng))
        # Query the communicator size to know how many entries to copy:
        # the ranges array holds one entry per process plus a final bound.
        cdef MPI_Comm comm = MPI_COMM_NULL
        CHKERR(PetscObjectGetComm(<PetscObject>self.vec, &comm))
        cdef int size = -1
        CHKERR(<PetscErrorCode>MPI_Comm_size(comm, &size))
        return array_i(size+1, rng)
1203
1204    def createLocalVector(self) -> Vec:
1205        """Create a local vector.
1206
1207        Not collective.
1208
1209        Returns
1210        -------
1211        Vec
1212            The local vector.
1213
1214        See Also
1215        --------
1216        getLocalVector, petsc.VecCreateLocalVector
1217
1218        """
1219        lvec = Vec()
1220        CHKERR(VecCreateLocalVector(self.vec, &lvec.vec))
1221        return lvec
1222
1223    def getLocalVector(self, Vec lvec, readonly: bool = False) -> None:
1224        """Maps the local portion of the vector into a local vector.
1225
1226        Logically collective.
1227
1228        Parameters
1229        ----------
1230        lvec
1231            The local vector obtained from `createLocalVector`.
1232        readonly
1233            Request read-only access.
1234
1235        See Also
1236        --------
1237        createLocalVector, restoreLocalVector, petsc.VecGetLocalVectorRead
1238        petsc.VecGetLocalVector
1239
1240        """
1241        if readonly:
1242            CHKERR(VecGetLocalVectorRead(self.vec, lvec.vec))
1243        else:
1244            CHKERR(VecGetLocalVector(self.vec, lvec.vec))
1245
1246    def restoreLocalVector(self, Vec lvec, readonly: bool = False) -> None:
1247        """Unmap a local access obtained with `getLocalVector`.
1248
1249        Logically collective.
1250
1251        Parameters
1252        ----------
1253        lvec
1254            The local vector.
1255        readonly
1256            Request read-only access.
1257
1258        See Also
1259        --------
1260        createLocalVector, getLocalVector, petsc.VecRestoreLocalVectorRead
1261        petsc.VecRestoreLocalVector
1262
1263        """
1264        if readonly:
1265            CHKERR(VecRestoreLocalVectorRead(self.vec, lvec.vec))
1266        else:
1267            CHKERR(VecRestoreLocalVector(self.vec, lvec.vec))
1268
1269    # FIXME Return type should be more specific
1270    def getBuffer(self, readonly: bool = False) -> Any:
1271        """Return a buffered view of the local portion of the vector.
1272
1273        Logically collective.
1274
1275        Parameters
1276        ----------
1277        readonly
1278            Request read-only access.
1279
1280        Returns
1281        -------
1282        typing.Any
1283            `Buffer object <python:c-api/buffer>` wrapping the local portion of
1284            the vector data. This can be used either as a context manager
1285            providing access as a numpy array or can be passed to array
1286            constructors accepting buffered objects such as `numpy.asarray`.
1287
1288        Examples
1289        --------
1290        Accessing the data with a context manager:
1291
1292        >>> vec = PETSc.Vec().createWithArray([1, 2, 3])
1293        >>> with vec.getBuffer() as arr:
1294        ...     arr
1295        array([1., 2., 3.])
1296
1297        Converting the buffer to an `ndarray`:
1298
1299        >>> buf = PETSc.Vec().createWithArray([1, 2, 3]).getBuffer()
1300        >>> np.asarray(buf)
1301        array([1., 2., 3.])
1302
1303        See Also
1304        --------
1305        getArray
1306
1307        """
1308        if readonly:
1309            return vec_getbuffer_r(self)
1310        else:
1311            return vec_getbuffer_w(self)
1312
1313    def getArray(self, readonly: bool = False) -> ArrayScalar:
1314        """Return local portion of the vector as an `ndarray`.
1315
1316        Logically collective.
1317
1318        Parameters
1319        ----------
1320        readonly
1321            Request read-only access.
1322
1323        See Also
1324        --------
1325        setArray, getBuffer
1326
1327        """
1328        if readonly:
1329            return vec_getarray_r(self)
1330        else:
1331            return vec_getarray_w(self)
1332
1333    def setArray(self, array: Sequence[Scalar]) -> None:
1334        """Set values for the local portion of the vector.
1335
1336        Logically collective.
1337
1338        See Also
1339        --------
1340        placeArray
1341
1342        """
1343        vec_setarray(self, array)
1344
    def placeArray(self, array: Sequence[Scalar]) -> None:
        """Set the local portion of the vector to a provided array.

        Not collective.

        See Also
        --------
        resetArray, setArray, petsc.VecPlaceArray

        """
        cdef PetscInt nv=0
        cdef PetscInt na=0
        cdef PetscScalar *a = NULL
        CHKERR(VecGetLocalSize(self.vec, &nv))
        # Obtain a C scalar buffer (na entries at a) backing the user array.
        array = oarray_s(array, &na, &a)
        # The replacement array must match the vector's local size exactly.
        if (na != nv): raise ValueError(
            "cannot place input array size %d, vector size %d" %
            (toInt(na), toInt(nv)))
        CHKERR(VecPlaceArray(self.vec, a))
        # Keep a reference so the placed memory stays alive until resetArray.
        self.set_attr('__placed_array__', array)
1365
1366    def resetArray(self, force: bool = False) -> ArrayScalar | None:
1367        """Reset the vector to use its default array.
1368
1369        Not collective.
1370
1371        Parameters
1372        ----------
1373        force
1374            Force the calling of `petsc.VecResetArray` even if no user array
1375            has been placed with `placeArray`.
1376
1377        Returns
1378        -------
1379        ArrayScalar
1380            The array previously provided by the user with `placeArray`.
1381            Can be `None` if ``force`` is `True` and no array was placed
1382            before.
1383
1384        See Also
1385        --------
1386        placeArray, petsc.VecResetArray
1387
1388        """
1389        cdef object array = None
1390        array = self.get_attr('__placed_array__')
1391        if array is None and not force: return None
1392        CHKERR(VecResetArray(self.vec))
1393        self.set_attr('__placed_array__', None)
1394        return array
1395
1396    def bindToCPU(self, flg: bool) -> None:
1397        """Bind vector operations execution on the CPU.
1398
1399        Logically collective.
1400
1401        See Also
1402        --------
1403        boundToCPU, petsc.VecBindToCPU
1404
1405        """
1406        cdef PetscBool bindFlg = asBool(flg)
1407        CHKERR(VecBindToCPU(self.vec, bindFlg))
1408
1409    def boundToCPU(self) -> bool:
1410        """Return whether the vector has been bound to the CPU.
1411
1412        Not collective.
1413
1414        See Also
1415        --------
1416        bindToCPU, petsc.VecBoundToCPU
1417
1418        """
1419        cdef PetscBool flg = PETSC_TRUE
1420        CHKERR(VecBoundToCPU(self.vec, &flg))
1421        return toBool(flg)
1422
    def getCUDAHandle(
        self,
        mode: AccessModeSpec = 'rw') -> Any:  # FIXME What is the right return type?
        """Return a pointer to the device buffer.

        Not collective.

        The returned pointer should be released using `restoreCUDAHandle`
        with the same access mode.

        Returns
        -------
        typing.Any
            CUDA device pointer.

        Notes
        -----
        This method may incur a host-to-device copy if the device data is
        out of date and ``mode`` is ``"r"`` or ``"rw"``.

        See Also
        --------
        restoreCUDAHandle, petsc.VecCUDAGetArray, petsc.VecCUDAGetArrayRead
        petsc.VecCUDAGetArrayWrite

        """
        cdef PetscScalar *hdl = NULL
        cdef const char *m = NULL
        if mode is not None: mode = str2bytes(mode, &m)
        # Inspect the first characters of the mode string:
        # NULL (mode is None) or "rw" -> read-write access.
        if m == NULL or (m[0] == c'r' and m[1] == c'w'):
            CHKERR(VecCUDAGetArray(self.vec, &hdl))
        elif m[0] == c'r':
            CHKERR(VecCUDAGetArrayRead(self.vec, <const PetscScalar**>&hdl))
        elif m[0] == c'w':
            CHKERR(VecCUDAGetArrayWrite(self.vec, &hdl))
        else:
            raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'")
        # Return the raw device address as an integer.
        return <Py_uintptr_t>hdl
1461
    def restoreCUDAHandle(
        self,
        handle: Any,  # FIXME What type hint is appropriate?
        mode: AccessModeSpec = 'rw') -> None:
        """Restore a pointer to the device buffer obtained with `getCUDAHandle`.

        Not collective.

        Parameters
        ----------
        handle
            CUDA device pointer.
        mode
            Access mode.

        See Also
        --------
        getCUDAHandle, petsc.VecCUDARestoreArray
        petsc.VecCUDARestoreArrayRead, petsc.VecCUDARestoreArrayWrite

        """
        # Recover the raw device pointer from the integer handle.
        cdef PetscScalar *hdl = <PetscScalar*>(<Py_uintptr_t>handle)
        cdef const char *m = NULL
        if mode is not None: mode = str2bytes(mode, &m)
        # Mode dispatch mirrors getCUDAHandle: NULL or "rw" -> read-write.
        if m == NULL or (m[0] == c'r' and m[1] == c'w'):
            CHKERR(VecCUDARestoreArray(self.vec, &hdl))
        elif m[0] == c'r':
            CHKERR(VecCUDARestoreArrayRead(self.vec, <const PetscScalar**>&hdl))
        elif m[0] == c'w':
            CHKERR(VecCUDARestoreArrayWrite(self.vec, &hdl))
        else:
            raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'")
1494
    def getHIPHandle(
        self,
        mode: AccessModeSpec = 'rw') -> Any:  # FIXME What is the right return type?
        """Return a pointer to the device buffer.

        Not collective.

        The returned pointer should be released using `restoreHIPHandle`
        with the same access mode.

        Returns
        -------
        typing.Any
            HIP device pointer.

        Notes
        -----
        This method may incur a host-to-device copy if the device data is
        out of date and ``mode`` is ``"r"`` or ``"rw"``.

        See Also
        --------
        restoreHIPHandle, petsc.VecHIPGetArray, petsc.VecHIPGetArrayRead
        petsc.VecHIPGetArrayWrite

        """
        cdef PetscScalar *hdl = NULL
        cdef const char *m = NULL
        if mode is not None: mode = str2bytes(mode, &m)
        # Inspect the first characters of the mode string:
        # NULL (mode is None) or "rw" -> read-write access.
        if m == NULL or (m[0] == c'r' and m[1] == c'w'):
            CHKERR(VecHIPGetArray(self.vec, &hdl))
        elif m[0] == c'r':
            CHKERR(VecHIPGetArrayRead(self.vec, <const PetscScalar**>&hdl))
        elif m[0] == c'w':
            CHKERR(VecHIPGetArrayWrite(self.vec, &hdl))
        else:
            raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'")
        # Return the raw device address as an integer.
        return <Py_uintptr_t>hdl
1533
    def restoreHIPHandle(
        self,
        handle: Any,  # FIXME What type hint is appropriate?
        mode: AccessModeSpec = 'rw') -> None:
        """Restore a pointer to the device buffer obtained with `getHIPHandle`.

        Not collective.

        Parameters
        ----------
        handle
            HIP device pointer.
        mode
            Access mode.

        See Also
        --------
        getHIPHandle, petsc.VecHIPRestoreArray, petsc.VecHIPRestoreArrayRead
        petsc.VecHIPRestoreArrayWrite

        """
        # Recover the raw device pointer from the integer handle.
        cdef PetscScalar *hdl = <PetscScalar*>(<Py_uintptr_t>handle)
        cdef const char *m = NULL
        if mode is not None: mode = str2bytes(mode, &m)
        # Mode dispatch mirrors getHIPHandle: NULL or "rw" -> read-write.
        if m == NULL or (m[0] == c'r' and m[1] == c'w'):
            CHKERR(VecHIPRestoreArray(self.vec, &hdl))
        elif m[0] == c'r':
            CHKERR(VecHIPRestoreArrayRead(self.vec, <const PetscScalar**>&hdl))
        elif m[0] == c'w':
            CHKERR(VecHIPRestoreArrayWrite(self.vec, &hdl))
        else:
            raise ValueError("Invalid mode: expected 'rw', 'r', or 'w'")
1566
1567    def getOffloadMask(self) -> int:
1568        """Return the offloading status of the vector.
1569
1570        Not collective.
1571
1572        Common return values include:
1573
1574        - 1: ``PETSC_OFFLOAD_CPU`` - CPU has valid entries
1575        - 2: ``PETSC_OFFLOAD_GPU`` - GPU has valid entries
1576        - 3: ``PETSC_OFFLOAD_BOTH`` - CPU and GPU are in sync
1577
1578        Returns
1579        -------
1580        int
1581            Enum value from `petsc.PetscOffloadMask` describing the offloading
1582            status.
1583
1584        See Also
1585        --------
1586        petsc.VecGetOffloadMask, petsc.PetscOffloadMask
1587
1588        """
1589        cdef PetscOffloadMask mask = PETSC_OFFLOAD_UNALLOCATED
1590        CHKERR(VecGetOffloadMask(self.vec, &mask))
1591        return mask
1592
1593    def getCLContextHandle(self) -> int:
1594        """Return the OpenCL context associated with the vector.
1595
1596        Not collective.
1597
1598        Returns
1599        -------
1600        int
1601            Pointer to underlying CL context. This can be used with
1602            `pyopencl` through `pyopencl.Context.from_int_ptr`.
1603
1604        See Also
1605        --------
1606        getCLQueueHandle, petsc.VecViennaCLGetCLContext
1607
1608        """
1609        cdef Py_uintptr_t ctxhdl = 0
1610        CHKERR(VecViennaCLGetCLContext(self.vec, &ctxhdl))
1611        return ctxhdl
1612
    def getCLQueueHandle(self) -> int:
        """Return the OpenCL command queue associated with the vector.

        Not collective.

        Returns
        -------
        int
            Pointer to underlying CL command queue. This can be used with
            `pyopencl` through `pyopencl.CommandQueue.from_int_ptr`.

        See Also
        --------
        getCLContextHandle, petsc.VecViennaCLGetCLQueue

        """
        cdef Py_uintptr_t queuehdl = 0
        CHKERR(VecViennaCLGetCLQueue(self.vec, &queuehdl))
        return queuehdl
1632
    def getCLMemHandle(
        self,
        mode: AccessModeSpec = 'rw') -> int:
        """Return the OpenCL buffer associated with the vector.

        Not collective.

        Returns
        -------
        int
            Pointer to the device buffer. This can be used with
            `pyopencl` through `pyopencl.MemoryObject.from_int_ptr`.

        Notes
        -----
        This method may incur a host-to-device copy if the device data is
        out of date and ``mode`` is ``"r"`` or ``"rw"``.

        See Also
        --------
        restoreCLMemHandle, petsc.VecViennaCLGetCLMem
        petsc.VecViennaCLGetCLMemRead, petsc.VecViennaCLGetCLMemWrite

        """
        cdef Py_uintptr_t memhdl = 0
        cdef const char *m = NULL
        mode = str2bytes(mode, &m)
        # Inspect the first characters of the mode string:
        # NULL (mode is None) or "rw" -> read-write access.
        if m == NULL or (m[0] == c'r' and m[1] == c'w'):
            CHKERR(VecViennaCLGetCLMem(self.vec, &memhdl))
        elif m[0] == c'r':
            CHKERR(VecViennaCLGetCLMemRead(self.vec, &memhdl))
        elif m[0] == c'w':
            CHKERR(VecViennaCLGetCLMemWrite(self.vec, &memhdl))
        else:
            raise ValueError("Invalid mode: expected 'r', 'w' or 'rw'")
        return memhdl
1669
1670    def restoreCLMemHandle(self) -> None:
1671        """Restore a pointer to the OpenCL buffer obtained with `getCLMemHandle`.
1672
1673        Not collective.
1674
1675        See Also
1676        --------
1677        getCLMemHandle, petsc.VecViennaCLRestoreCLMemWrite
1678
1679        """
1680        CHKERR(VecViennaCLRestoreCLMemWrite(self.vec))
1681
1682    def duplicate(self, array: Sequence[Scalar] | None = None) -> Vec:
1683        """Create a new vector with the same type, optionally with data.
1684
1685        Collective.
1686
1687        Parameters
1688        ----------
1689        array
1690            Optional values to store in the new vector.
1691
1692        See Also
1693        --------
1694        copy, petsc.VecDuplicate
1695
1696        """
1697        cdef Vec vec = type(self)()
1698        CHKERR(VecDuplicate(self.vec, &vec.vec))
1699        # duplicate tensor context
1700        cdef object ctx0 = self.get_attr('__dltensor_ctx__')
1701        if ctx0 is not None:
1702            vec.set_attr('__dltensor_ctx__', ctx0)
1703        if array is not None:
1704            vec_setarray(vec, array)
1705        return vec
1706
    def copy(self, Vec result=None) -> Vec:
        """Return a copy of the vector.

        Logically collective.

        This operation copies vector entries to the new vector.

        Parameters
        ----------
        result
            Target vector for the copy. If `None` then a new vector is
            created internally.

        See Also
        --------
        duplicate, petsc.VecCopy

        """
        if result is None:
            result = type(self)()
        # Lazily duplicate the layout when the target has no PETSc vector yet.
        if result.vec == NULL:
            CHKERR(VecDuplicate(self.vec, &result.vec))
        CHKERR(VecCopy(self.vec, result.vec))
        return result
1731
1732    def chop(self, tol: float) -> None:
1733        """Set all vector entries less than some absolute tolerance to zero.
1734
1735        Collective.
1736
1737        Parameters
1738        ----------
1739        tol
1740            The absolute tolerance below which entries are set to zero.
1741
1742        See Also
1743        --------
1744        petsc.VecFilter
1745
1746        """
1747        cdef PetscReal rval = asReal(tol)
1748        CHKERR(VecFilter(self.vec, rval))
1749
1750    def load(self, Viewer viewer) -> Self:
1751        """Load a vector.
1752
1753        Collective.
1754
1755        See Also
1756        --------
1757        view, petsc.VecLoad
1758
1759        """
1760        cdef MPI_Comm comm = MPI_COMM_NULL
1761        cdef PetscObject obj = <PetscObject>(viewer.vwr)
1762        if self.vec == NULL:
1763            CHKERR(PetscObjectGetComm(obj, &comm))
1764            CHKERR(VecCreate(comm, &self.vec))
1765        CHKERR(VecLoad(self.vec, viewer.vwr))
1766        return self
1767
1768    def equal(self, Vec vec) -> bool:
1769        """Return whether the vector is equal to another.
1770
1771        Collective.
1772
1773        Parameters
1774        ----------
1775        vec
1776            Vector to compare with.
1777
1778        See Also
1779        --------
1780        petsc.VecEqual
1781
1782        """
1783        cdef PetscBool flag = PETSC_FALSE
1784        CHKERR(VecEqual(self.vec, vec.vec, &flag))
1785        return toBool(flag)
1786
1787    def dot(self, Vec vec) -> Scalar:
1788        """Return the dot product with ``vec``.
1789
1790        Collective.
1791
1792        For complex numbers this computes yᴴ·x with ``self`` as x, ``vec``
1793        as y and where yᴴ denotes the conjugate transpose of y.
1794
1795        Use `tDot` for the indefinite form yᵀ·x where yᵀ denotes the
1796        transpose of y.
1797
1798        Parameters
1799        ----------
1800        vec
1801            Vector to compute the dot product with.
1802
1803        See Also
1804        --------
1805        dotBegin, dotEnd, tDot, petsc.VecDot
1806
1807        """
1808        cdef PetscScalar sval = 0
1809        CHKERR(VecDot(self.vec, vec.vec, &sval))
1810        return toScalar(sval)
1811
1812    def dotBegin(self, Vec vec) -> None:
1813        """Begin computing the dot product.
1814
1815        Collective.
1816
1817        This should be paired with a call to `dotEnd`.
1818
1819        Parameters
1820        ----------
1821        vec
1822            Vector to compute the dot product with.
1823
1824        See Also
1825        --------
1826        dotEnd, dot, petsc.VecDotBegin
1827
1828        """
1829        cdef PetscScalar sval = 0
1830        CHKERR(VecDotBegin(self.vec, vec.vec, &sval))
1831
1832    def dotEnd(self, Vec vec) -> Scalar:
1833        """Finish computing the dot product initiated with `dotBegin`.
1834
1835        Collective.
1836
1837        See Also
1838        --------
1839        dotBegin, dot, petsc.VecDotEnd
1840
1841        """
1842        cdef PetscScalar sval = 0
1843        CHKERR(VecDotEnd(self.vec, vec.vec, &sval))
1844        return toScalar(sval)
1845
1846    def tDot(self, Vec vec) -> Scalar:
1847        """Return the indefinite dot product with ``vec``.
1848
1849        Collective.
1850
1851        This computes yᵀ·x with ``self`` as x, ``vec``
1852        as y and where yᵀ denotes the transpose of y.
1853
1854        Parameters
1855        ----------
1856        vec
1857            Vector to compute the indefinite dot product with.
1858
1859        See Also
1860        --------
1861        tDotBegin, tDotEnd, dot, petsc.VecTDot
1862
1863        """
1864        cdef PetscScalar sval = 0
1865        CHKERR(VecTDot(self.vec, vec.vec, &sval))
1866        return toScalar(sval)
1867
1868    def tDotBegin(self, Vec vec) -> None:
1869        """Begin computing the indefinite dot product.
1870
1871        Collective.
1872
1873        This should be paired with a call to `tDotEnd`.
1874
1875        Parameters
1876        ----------
1877        vec
1878            Vector to compute the indefinite dot product with.
1879
1880        See Also
1881        --------
1882        tDotEnd, tDot, petsc.VecTDotBegin
1883
1884        """
1885        cdef PetscScalar sval = 0
1886        CHKERR(VecTDotBegin(self.vec, vec.vec, &sval))
1887
1888    def tDotEnd(self, Vec vec) -> Scalar:
1889        """Finish computing the indefinite dot product initiated with `tDotBegin`.
1890
1891        Collective.
1892
1893        See Also
1894        --------
1895        tDotBegin, tDot, petsc.VecTDotEnd
1896
1897        """
1898        cdef PetscScalar sval = 0
1899        CHKERR(VecTDotEnd(self.vec, vec.vec, &sval))
1900        return toScalar(sval)
1901
    def mDot(self, vecs: Sequence[Vec], out: ArrayScalar | None = None) -> ArrayScalar:
        """Compute Xᴴ·y with X an array of vectors.

        Collective.

        Parameters
        ----------
        vecs
            Array of vectors.
        out
            Optional placeholder for the result.

        See Also
        --------
        dot, tDot, mDotBegin, mDotEnd, petsc.VecMDot

        """
        cdef PetscInt nv=<PetscInt>len(vecs), no=0
        cdef PetscVec *v=NULL
        cdef PetscScalar *val=NULL
        cdef Py_ssize_t i=0
        # 'unused' owns the temporary C pointer array; holding it as a local
        # keeps the memory alive until VecMDot returns.
        cdef object unused = oarray_p(empty_p(nv), NULL, <void**>&v)
        for i from 0 <= i < nv:
            v[i] = (<Vec?>(vecs[i])).vec
        if out is None:
            out = empty_s(nv)
        # Obtain a writable scalar buffer (no entries at val) backing 'out'.
        out = oarray_s(out, &no, &val)
        if (nv != no): raise ValueError(
            ("incompatible array sizes: "
             "nv=%d, no=%d") % (toInt(nv), toInt(no)))
        CHKERR(VecMDot(self.vec, nv, v, val))
        return out
1934
    def mDotBegin(self, vecs: Sequence[Vec], out: ArrayScalar) -> None:
        """Start a split phase multiple dot product computation.

        Collective.

        This should be paired with a call to `mDotEnd`.

        Parameters
        ----------
        vecs
            Array of vectors.
        out
            Placeholder for the result.

        See Also
        --------
        mDot, mDotEnd, petsc.VecMDotBegin

        """
        cdef PetscInt nv=<PetscInt>len(vecs), no=0
        cdef PetscVec *v=NULL
        cdef PetscScalar *val=NULL
        cdef Py_ssize_t i=0
        # Temporary C array holding the PETSc handle of each input vector;
        # `unused` keeps the backing buffer alive for the duration of the call.
        cdef object unused = oarray_p(empty_p(nv), NULL, <void**>&v)
        for i from 0 <= i < nv:
            v[i] = (<Vec?>(vecs[i])).vec
        out = oarray_s(out, &no, &val)
        # The output buffer must provide exactly one scalar per input vector.
        if (nv != no): raise ValueError(
            ("incompatible array sizes: "
             "nv=%d, no=%d") % (toInt(nv), toInt(no)))
        CHKERR(VecMDotBegin(self.vec, nv, v, val))
1964
    def mDotEnd(self, vecs: Sequence[Vec], out: ArrayScalar) -> ArrayScalar:
        """End a split phase multiple dot product computation.

        Collective.

        Parameters
        ----------
        vecs
            Array of vectors.
        out
            Placeholder for the result.

        See Also
        --------
        mDot, mDotBegin, petsc.VecMDotEnd

        """
        cdef PetscInt nv=<PetscInt>len(vecs), no=0
        cdef PetscVec *v=NULL
        cdef PetscScalar *val=NULL
        cdef Py_ssize_t i=0
        # Temporary C array holding the PETSc handle of each input vector;
        # `unused` keeps the backing buffer alive for the duration of the call.
        cdef object unused = oarray_p(empty_p(nv), NULL, <void**>&v)
        for i from 0 <= i < nv:
            v[i] = (<Vec?>(vecs[i])).vec
        out = oarray_s(out, &no, &val)
        # The output buffer must provide exactly one scalar per input vector.
        if (nv != no): raise ValueError(
            ("incompatible array sizes: "
             "nv=%d, no=%d") % (toInt(nv), toInt(no)))
        CHKERR(VecMDotEnd(self.vec, nv, v, val))
        return out
1995
    def mtDot(self, vecs: Sequence[Vec], out: ArrayScalar | None = None) -> ArrayScalar:
        """Compute Xᵀ·y with X an array of vectors.

        Collective.

        Parameters
        ----------
        vecs
            Array of vectors.
        out
            Optional placeholder for the result.

        See Also
        --------
        tDot, mDot, mtDotBegin, mtDotEnd, petsc.VecMTDot

        """
        cdef PetscInt nv=<PetscInt>len(vecs), no=0
        cdef PetscVec *v=NULL
        cdef PetscScalar *val=NULL
        cdef Py_ssize_t i=0
        # Temporary C array holding the PETSc handle of each input vector;
        # `unused` keeps the backing buffer alive for the duration of the call.
        cdef object unused = oarray_p(empty_p(nv), NULL, <void**>&v)
        for i from 0 <= i < nv:
            v[i] = (<Vec?>(vecs[i])).vec
        if out is None:
            out = empty_s(nv)
        out = oarray_s(out, &no, &val)
        # The output buffer must provide exactly one scalar per input vector.
        if (nv != no): raise ValueError(
            ("incompatible array sizes: "
             "nv=%d, no=%d") % (toInt(nv), toInt(no)))
        CHKERR(VecMTDot(self.vec, nv, v, val))
        return out
2028
    def mtDotBegin(self, vecs: Sequence[Vec], out: ArrayScalar) -> None:
        """Start a split phase transpose multiple dot product computation.

        Collective.

        This should be paired with a call to `mtDotEnd`.

        Parameters
        ----------
        vecs
            Array of vectors.
        out
            Placeholder for the result.

        See Also
        --------
        mtDot, mtDotEnd, petsc.VecMTDotBegin

        """
        cdef PetscInt nv=<PetscInt>len(vecs), no=0
        cdef PetscVec *v=NULL
        cdef PetscScalar *val=NULL
        cdef Py_ssize_t i=0
        # Temporary C array holding the PETSc handle of each input vector;
        # `unused` keeps the backing buffer alive for the duration of the call.
        cdef object unused = oarray_p(empty_p(nv), NULL, <void**>&v)
        for i from 0 <= i < nv:
            v[i] = (<Vec?>(vecs[i])).vec
        out = oarray_s(out, &no, &val)
        # The output buffer must provide exactly one scalar per input vector.
        if (nv != no): raise ValueError(
            ("incompatible array sizes: "
             "nv=%d, no=%d") % (toInt(nv), toInt(no)))
        CHKERR(VecMTDotBegin(self.vec, nv, v, val))
2058
    def mtDotEnd(self, vecs: Sequence[Vec], out: ArrayScalar) -> ArrayScalar:
        """End a split phase transpose multiple dot product computation.

        Collective.

        Parameters
        ----------
        vecs
            Array of vectors.
        out
            Placeholder for the result.

        See Also
        --------
        mtDot, mtDotBegin, petsc.VecMTDotEnd

        """
        cdef PetscInt nv=<PetscInt>len(vecs), no=0
        cdef PetscVec *v=NULL
        cdef PetscScalar *val=NULL
        cdef Py_ssize_t i=0
        # Temporary C array holding the PETSc handle of each input vector;
        # `unused` keeps the backing buffer alive for the duration of the call.
        cdef object unused = oarray_p(empty_p(nv), NULL, <void**>&v)
        for i from 0 <= i < nv:
            v[i] = (<Vec?>(vecs[i])).vec
        out = oarray_s(out, &no, &val)
        # The output buffer must provide exactly one scalar per input vector.
        if (nv != no): raise ValueError(
            ("incompatible array sizes: "
             "nv=%d, no=%d") % (toInt(nv), toInt(no)))
        CHKERR(VecMTDotEnd(self.vec, nv, v, val))
        return out
2089
    def norm(
        self,
        norm_type: NormTypeSpec = None) -> float | tuple[float, float]:
        """Compute the vector norm.

        Collective.

        A 2-tuple is returned if `NormType.NORM_1_AND_2` is specified.

        See Also
        --------
        petsc.VecNorm, petsc.NormType

        """
        cdef PetscNormType norm_1_2 = PETSC_NORM_1_AND_2
        # Default to the 2-norm when no norm type is given.
        cdef PetscNormType ntype = PETSC_NORM_2
        if norm_type is not None: ntype = norm_type
        # Two slots: rval[1] is only filled for NORM_1_AND_2.
        cdef PetscReal rval[2]
        CHKERR(VecNorm(self.vec, ntype, rval))
        if ntype != norm_1_2: return toReal(rval[0])
        else: return (toReal(rval[0]), toReal(rval[1]))
2111
    def normBegin(
        self,
        norm_type: NormTypeSpec = None) -> None:
        """Begin computing the vector norm.

        Collective.

        This should be paired with a call to `normEnd`.

        See Also
        --------
        normEnd, norm, petsc.VecNormBegin

        """
        # Default to the 2-norm when no norm type is given.
        cdef PetscNormType ntype = PETSC_NORM_2
        if norm_type is not None: ntype = norm_type
        # The result placeholder is discarded here; the actual value is
        # collected by the matching normEnd() call.
        cdef PetscReal dummy[2]
        CHKERR(VecNormBegin(self.vec, ntype, dummy))
2130
    def normEnd(
        self,
        norm_type: NormTypeSpec = None) -> float | tuple[float, float]:
        """Finish computations initiated with `normBegin`.

        Collective.

        See Also
        --------
        normBegin, norm, petsc.VecNormEnd

        """
        cdef PetscNormType norm_1_2 = PETSC_NORM_1_AND_2
        # Default to the 2-norm when no norm type is given; must match the
        # norm type passed to the preceding normBegin() call.
        cdef PetscNormType ntype = PETSC_NORM_2
        if norm_type is not None: ntype = norm_type
        # Two slots: rval[1] is only filled for NORM_1_AND_2.
        cdef PetscReal rval[2]
        CHKERR(VecNormEnd(self.vec, ntype, rval))
        if ntype != norm_1_2: return toReal(rval[0])
        else: return (toReal(rval[0]), toReal(rval[1]))
2150
2151    def dotNorm2(self, Vec vec) -> tuple[Scalar, float]:
2152        """Return the dot product with ``vec`` and its squared norm.
2153
2154        Collective.
2155
2156        See Also
2157        --------
2158        dot, norm, petsc.VecDotNorm2
2159
2160        """
2161        cdef PetscScalar sval = 0
2162        cdef PetscReal rval = 0
2163        CHKERR(VecDotNorm2(self.vec, vec.vec, &sval, &rval))
2164        return toScalar(sval), toReal(float)
2165
    def sum(self) -> Scalar:
        """Return the sum of all the entries of the vector.

        Collective.

        See Also
        --------
        petsc.VecSum

        """
        cdef PetscScalar sval = 0
        # PETSc writes the global sum into sval.
        CHKERR(VecSum(self.vec, &sval))
        return toScalar(sval)
2179
    def mean(self) -> Scalar:
        """Return the arithmetic mean of all the entries of the vector.

        Collective.

        See Also
        --------
        petsc.VecMean

        """
        cdef PetscScalar sval = 0
        # PETSc writes the global mean into sval.
        CHKERR(VecMean(self.vec, &sval))
        return toScalar(sval)
2193
    def min(self) -> tuple[int, float]:
        """Return the vector entry with minimum real part and its location.

        Collective.

        Returns
        -------
        p : int
            Location of the minimum value. If multiple entries exist with the
            same value then the smallest index will be returned.
        val : float
            Minimum value.

        See Also
        --------
        max, petsc.VecMin

        """
        cdef PetscInt  ival = 0
        cdef PetscReal rval = 0
        CHKERR(VecMin(self.vec, &ival, &rval))
        return (toInt(ival), toReal(rval))
2216
    def max(self) -> tuple[int, float]:
        """Return the vector entry with maximum real part and its location.

        Collective.

        Returns
        -------
        p : int
            Location of the maximum value. If multiple entries exist with the
            same value then the smallest index will be returned.
        val : float
            Maximum value.

        See Also
        --------
        min, petsc.VecMax

        """
        cdef PetscInt  ival = 0
        cdef PetscReal rval = 0
        CHKERR(VecMax(self.vec, &ival, &rval))
        return (toInt(ival), toReal(rval))
2239
    def normalize(self) -> float:
        """Normalize the vector by its 2-norm.

        Collective.

        Returns
        -------
        float
            The vector norm before normalization.

        See Also
        --------
        norm, petsc.VecNormalize

        """
        cdef PetscReal rval = 0
        # PETSc scales the vector in place and reports the prior 2-norm.
        CHKERR(VecNormalize(self.vec, &rval))
        return toReal(rval)
2258
    def reciprocal(self) -> None:
        """Replace each entry in the vector by its reciprocal.

        Logically collective.

        See Also
        --------
        petsc.VecReciprocal

        """
        # In-place transformation performed by PETSc.
        CHKERR(VecReciprocal(self.vec))
2270
    def exp(self) -> None:
        """Replace each entry (xₙ) in the vector by exp(xₙ).

        Logically collective.

        See Also
        --------
        log, petsc.VecExp

        """
        # In-place transformation performed by PETSc.
        CHKERR(VecExp(self.vec))
2282
    def log(self) -> None:
        """Replace each entry in the vector by its natural logarithm.

        Logically collective.

        See Also
        --------
        exp, petsc.VecLog

        """
        # In-place transformation performed by PETSc.
        CHKERR(VecLog(self.vec))
2294
    def sqrtabs(self) -> None:
        """Replace each entry (xₙ) in the vector by √|xₙ|.

        Logically collective.

        See Also
        --------
        petsc.VecSqrtAbs

        """
        # In-place transformation performed by PETSc.
        CHKERR(VecSqrtAbs(self.vec))
2306
    def abs(self) -> None:
        """Replace each entry (xₙ) in the vector by |xₙ|.

        Logically collective.

        See Also
        --------
        petsc.VecAbs

        """
        # In-place transformation performed by PETSc.
        CHKERR(VecAbs(self.vec))
2318
    def conjugate(self) -> None:
        """Conjugate the vector.

        Logically collective.

        See Also
        --------
        petsc.VecConjugate

        """
        # In-place transformation performed by PETSc.
        CHKERR(VecConjugate(self.vec))
2330
    def setRandom(self, Random random=None) -> None:
        """Set all components of the vector to random numbers.

        Collective.

        Parameters
        ----------
        random
            Random number generator. If `None` then one will be created
            internally.

        See Also
        --------
        petsc.VecSetRandom

        """
        # A NULL generator tells PETSc to create one internally.
        cdef PetscRandom rnd = NULL
        if random is not None: rnd = random.rnd
        CHKERR(VecSetRandom(self.vec, rnd))
2350
    def permute(self, IS order, invert: bool = False) -> None:
        """Permute the vector in-place with a provided ordering.

        Collective.

        Parameters
        ----------
        order
            Ordering for the permutation.
        invert
            Whether to invert the permutation.

        See Also
        --------
        petsc.VecPermute

        """
        # Convert the Python truth value to a PETSc boolean.
        cdef PetscBool cinvert = PETSC_FALSE
        if invert: cinvert = PETSC_TRUE
        CHKERR(VecPermute(self.vec, order.iset, cinvert))
2371
    def zeroEntries(self) -> None:
        """Set all entries in the vector to zero.

        Logically collective.

        See Also
        --------
        set, petsc.VecZeroEntries

        """
        # In-place zeroing performed by PETSc.
        CHKERR(VecZeroEntries(self.vec))
2383
    def set(self, alpha: Scalar) -> None:
        """Set all components of the vector to the same value.

        Collective.

        Parameters
        ----------
        alpha
            Value to set all entries to.

        See Also
        --------
        zeroEntries, isset, petsc.VecSet

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecSet(self.vec, sval))
2396
    def isset(self, IS idx, alpha: Scalar) -> None:
        """Set specific elements of the vector to the same value.

        Not collective.

        Parameters
        ----------
        idx
            Index set specifying the vector entries to set.
        alpha
            Value to set the selected entries to.

        See Also
        --------
        set, zeroEntries, petsc.VecISSet

        """
        cdef PetscScalar aval = asScalar(alpha)
        CHKERR(VecISSet(self.vec, idx.iset, aval))
2416
    def scale(self, alpha: Scalar) -> None:
        """Scale all entries of the vector.

        Collective.

        This method sets each entry (xₙ) in the vector to ɑ·xₙ.

        Parameters
        ----------
        alpha
            The scaling factor.

        See Also
        --------
        shift, petsc.VecScale

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecScale(self.vec, sval))
2436
    def shift(self, alpha: Scalar) -> None:
        """Shift all entries in the vector.

        Collective.

        This method sets each entry (xₙ) in the vector to xₙ + ɑ.

        Parameters
        ----------
        alpha
            The shift to apply to the vector values.

        See Also
        --------
        scale, petsc.VecShift

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecShift(self.vec, sval))
2456
    def swap(self, Vec vec) -> None:
        """Swap the content of two vectors.

        Logically collective.

        Parameters
        ----------
        vec
            The vector to swap data with.

        See Also
        --------
        petsc.VecSwap

        """
        CHKERR(VecSwap(self.vec, vec.vec))
2473
    def axpy(self, alpha: Scalar, Vec x) -> None:
        """Compute and store y = ɑ·x + y.

        Logically collective.

        Parameters
        ----------
        alpha
            Scale factor.
        x
            Input vector.

        See Also
        --------
        isaxpy, petsc.VecAXPY

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecAXPY(self.vec, sval, x.vec))
2493
    def isaxpy(self, IS idx, alpha: Scalar, Vec x) -> None:
        """Add a scaled reduced-space vector to a subset of the vector.

        Logically collective.

        This is equivalent to ``y[idx[i]] += alpha*x[i]``.

        Parameters
        ----------
        idx
            Index set for the reduced space. Negative indices are skipped.
        alpha
            Scale factor.
        x
            Reduced-space vector.

        See Also
        --------
        axpy, aypx, axpby, petsc.VecISAXPY

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecISAXPY(self.vec, idx.iset, sval, x.vec))
2517
    def aypx(self, alpha: Scalar, Vec x) -> None:
        """Compute and store y = x + ɑ·y.

        Logically collective.

        Parameters
        ----------
        alpha
            Scale factor.
        x
            Input vector, must not be the current vector.

        See Also
        --------
        axpy, axpby, petsc.VecAYPX

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecAYPX(self.vec, sval, x.vec))
2537
    def axpby(self, alpha: Scalar, beta: Scalar, Vec x) -> None:
        """Compute and store y = ɑ·x + β·y.

        Logically collective.

        Parameters
        ----------
        alpha
            First scale factor.
        beta
            Second scale factor.
        x
            Input vector, must not be the current vector.

        See Also
        --------
        axpy, aypx, waxpy, petsc.VecAXPBY

        """
        cdef PetscScalar sval1 = asScalar(alpha)
        cdef PetscScalar sval2 = asScalar(beta)
        CHKERR(VecAXPBY(self.vec, sval1, sval2, x.vec))
2560
    def waxpy(self, alpha: Scalar, Vec x, Vec y) -> None:
        """Compute and store w = ɑ·x + y.

        Logically collective.

        Parameters
        ----------
        alpha
            Scale factor.
        x
            First input vector.
        y
            Second input vector.

        See Also
        --------
        axpy, aypx, axpby, maxpy, petsc.VecWAXPY

        """
        cdef PetscScalar sval = asScalar(alpha)
        CHKERR(VecWAXPY(self.vec, sval, x.vec, y.vec))
2582
    def maxpy(self, alphas: Sequence[Scalar], vecs: Sequence[Vec]) -> None:
        """Compute and store y = Σₙ(ɑₙ·Xₙ) + y with X an array of vectors.

        Logically collective.

        Equivalent to ``y[:] = alphas[i]*vecs[i, :] + y[:]``.

        Parameters
        ----------
        alphas
            Array of scale factors, one for each vector in ``vecs``.
        vecs
            Array of vectors.

        See Also
        --------
        axpy, aypx, axpby, waxpy, petsc.VecMAXPY

        """
        cdef PetscInt n = 0
        cdef PetscScalar *a = NULL
        cdef PetscVec *v = NULL
        # Copy the scale factors into a C scalar array; n receives the count.
        cdef object unused1 = iarray_s(alphas, &n, &a)
        # Temporary C array holding the PETSc handle of each input vector.
        cdef object unused2 = oarray_p(empty_p(n), NULL, <void**>&v)
        # NOTE(review): siblings (mDot et al.) raise ValueError on size
        # mismatch; an assert is weaker, but changing the exception type
        # here could break callers that catch AssertionError.
        assert n == len(vecs)
        cdef Py_ssize_t i=0
        for i from 0 <= i < n:
            v[i] = (<Vec?>(vecs[i])).vec
        CHKERR(VecMAXPY(self.vec, n, a, v))
2612
    def pointwiseMult(self, Vec x, Vec y) -> None:
        """Compute and store the component-wise multiplication of two vectors.

        Logically collective.

        Equivalent to ``w[i] = x[i] * y[i]``.

        Parameters
        ----------
        x, y
            Input vectors to multiply component-wise.

        See Also
        --------
        pointwiseDivide, petsc.VecPointwiseMult

        """
        CHKERR(VecPointwiseMult(self.vec, x.vec, y.vec))
2631
    def pointwiseDivide(self, Vec x, Vec y) -> None:
        """Compute and store the component-wise division of two vectors.

        Logically collective.

        Equivalent to ``w[i] = x[i] / y[i]``.

        Parameters
        ----------
        x
            Numerator vector.
        y
            Denominator vector.

        See Also
        --------
        pointwiseMult, petsc.VecPointwiseDivide

        """
        CHKERR(VecPointwiseDivide(self.vec, x.vec, y.vec))
2652
    def pointwiseMin(self, Vec x, Vec y) -> None:
        """Compute and store the component-wise minimum of two vectors.

        Logically collective.

        Equivalent to ``w[i] = min(x[i], y[i])``.

        Parameters
        ----------
        x, y
            Input vectors to find the component-wise minima.

        See Also
        --------
        pointwiseMax, pointwiseMaxAbs, petsc.VecPointwiseMin

        """
        CHKERR(VecPointwiseMin(self.vec, x.vec, y.vec))
2671
    def pointwiseMax(self, Vec x, Vec y) -> None:
        """Compute and store the component-wise maximum of two vectors.

        Logically collective.

        Equivalent to ``w[i] = max(x[i], y[i])``.

        Parameters
        ----------
        x, y
            Input vectors to find the component-wise maxima.

        See Also
        --------
        pointwiseMin, pointwiseMaxAbs, petsc.VecPointwiseMax

        """
        CHKERR(VecPointwiseMax(self.vec, x.vec, y.vec))
2690
    def pointwiseMaxAbs(self, Vec x, Vec y) -> None:
        """Compute and store the component-wise maximum absolute values.

        Logically collective.

        Equivalent to ``w[i] = max(abs(x[i]), abs(y[i]))``.

        Parameters
        ----------
        x, y
            Input vectors to find the component-wise maxima.

        See Also
        --------
        pointwiseMin, pointwiseMax, petsc.VecPointwiseMaxAbs

        """
        CHKERR(VecPointwiseMaxAbs(self.vec, x.vec, y.vec))
2709
    def maxPointwiseDivide(self, Vec vec) -> float:
        """Return the maximum of the component-wise absolute value division.

        Logically collective.

        Equivalent to ``result = max_i abs(x[i] / y[i])`` with ``x`` this
        vector and ``y`` the ``vec`` argument.

        Parameters
        ----------
        vec
            Denominator vector.

        See Also
        --------
        pointwiseMin, pointwiseMax, pointwiseMaxAbs
        petsc.VecMaxPointwiseDivide

        """
        cdef PetscReal rval = 0
        CHKERR(VecMaxPointwiseDivide(self.vec, vec.vec, &rval))
        return toReal(rval)
2733
    def getValue(self, index: int) -> Scalar:
        """Return a single value from the vector.

        Not collective.

        Only values locally stored may be accessed.

        Parameters
        ----------
        index
            Location of the value to read.

        See Also
        --------
        getValues, petsc.VecGetValues

        """
        cdef PetscInt    ival = asInt(index)
        cdef PetscScalar sval = 0
        # Single-entry read: one index, one scalar slot.
        CHKERR(VecGetValues(self.vec, 1, &ival, &sval))
        return toScalar(sval)
2755
    def getValues(
        self,
        indices: Sequence[int],
        values: Sequence[Scalar] | None = None) -> ArrayScalar:
        """Return values from certain locations in the vector.

        Not collective.

        Only values locally stored may be accessed.

        Parameters
        ----------
        indices
            Locations of the values to read.
        values
            Location to store the collected values. If not provided then a new
            array will be allocated.

        See Also
        --------
        getValue, setValues, petsc.VecGetValues

        """
        # Delegate argument conversion and the C call to the shared helper.
        return vecgetvalues(self.vec, indices, values)
2780
    def getValuesStagStencil(self, indices, values=None) -> None:
        """Not implemented.

        Raises
        ------
        NotImplementedError
            Always.

        """
        raise NotImplementedError
2784
    def setValue(
        self,
        index: int,
        value: Scalar,
        addv: InsertModeSpec = None) -> None:
        """Insert or add a single value in the vector.

        Not collective.

        Parameters
        ----------
        index
            Location to write to. Negative indices are ignored.
        value
            Value to insert at ``index``.
        addv
            Insertion mode.

        Notes
        -----
        The values may be cached so `assemblyBegin` and `assemblyEnd`
        must be called after all calls of this method are completed.

        Multiple calls to `setValue` cannot be made with different values
        for ``addv`` without intermediate calls to `assemblyBegin` and
        `assemblyEnd`.

        See Also
        --------
        setValues, petsc.VecSetValues

        """
        cdef PetscInt    ival = asInt(index)
        cdef PetscScalar sval = asScalar(value)
        cdef PetscInsertMode caddv = insertmode(addv)
        # Single-entry write: one index, one scalar.
        CHKERR(VecSetValues(self.vec, 1, &ival, &sval, caddv))
2821
    def setValues(
        self,
        indices: Sequence[int],
        values: Sequence[Scalar],
        addv: InsertModeSpec = None) -> None:
        """Insert or add multiple values in the vector.

        Not collective.

        Parameters
        ----------
        indices
            Locations to write to. Negative indices are ignored.
        values
            Values to insert at ``indices``.
        addv
            Insertion mode.

        Notes
        -----
        The values may be cached so `assemblyBegin` and `assemblyEnd`
        must be called after all calls of this method are completed.

        Multiple calls to `setValues` cannot be made with different values
        for ``addv`` without intermediate calls to `assemblyBegin` and
        `assemblyEnd`.

        See Also
        --------
        setValue, petsc.VecSetValues

        """
        # Trailing flags select the non-blocked, global-numbering variant.
        vecsetvalues(self.vec, indices, values, addv, 0, 0)
2855
    def setValuesBlocked(
        self,
        indices: Sequence[int],
        values: Sequence[Scalar],
        addv: InsertModeSpec = None) -> None:
        """Insert or add blocks of values in the vector.

        Not collective.

        Equivalent to ``x[bs*indices[i]+j] = y[bs*i+j]`` for
        ``0 <= i < len(indices)``, ``0 <= j < bs`` and ``bs`` `block_size`.

        Parameters
        ----------
        indices
            Block indices to write to. Negative indices are ignored.
        values
            Values to insert at ``indices``. Should have length
            ``len(indices) * vec.block_size``.
        addv
            Insertion mode.

        Notes
        -----
        The values may be cached so `assemblyBegin` and `assemblyEnd`
        must be called after all calls of this method are completed.

        Multiple calls to `setValuesBlocked` cannot be made with different
        values for ``addv`` without intermediate calls to `assemblyBegin`
        and `assemblyEnd`.

        See Also
        --------
        setValues, petsc.VecSetValuesBlocked

        """
        # Trailing flags select the blocked, global-numbering variant.
        vecsetvalues(self.vec, indices, values, addv, 1, 0)
2893
    def setValuesStagStencil(self, indices, values, addv=None) -> None:
        """Not implemented.

        Raises
        ------
        NotImplementedError
            Always.

        """
        raise NotImplementedError
2897
    def setLGMap(self, LGMap lgmap) -> None:
        """Set the local-to-global mapping.

        Logically collective.

        This allows users to insert vector entries using a local numbering
        with `setValuesLocal`.

        Parameters
        ----------
        lgmap
            The local-to-global mapping to use.

        See Also
        --------
        setValues, setValuesLocal, getLGMap, petsc.VecSetLocalToGlobalMapping

        """
        CHKERR(VecSetLocalToGlobalMapping(self.vec, lgmap.lgm))
2912
    def getLGMap(self) -> LGMap:
        """Return the local-to-global mapping.

        Not collective.

        See Also
        --------
        setLGMap, petsc.VecGetLocalToGlobalMapping

        """
        cdef LGMap cmap = LGMap()
        CHKERR(VecGetLocalToGlobalMapping(self.vec, &cmap.lgm))
        # Bump the PETSc reference count: the returned wrapper co-owns
        # the underlying mapping object.
        CHKERR(PetscINCREF(cmap.obj))
        return cmap
2927
    def setValueLocal(
        self,
        index: int,
        value: Scalar,
        addv: InsertModeSpec = None) -> None:
        """Insert or add a single value in the vector using a local numbering.

        Not collective.

        Parameters
        ----------
        index
            Location to write to.
        value
            Value to insert at ``index``.
        addv
            Insertion mode.

        Notes
        -----
        The values may be cached so `assemblyBegin` and `assemblyEnd`
        must be called after all calls of this method are completed.

        Multiple calls to `setValueLocal` cannot be made with different
        values for ``addv`` without intermediate calls to `assemblyBegin`
        and `assemblyEnd`.

        See Also
        --------
        setValuesLocal, petsc.VecSetValuesLocal

        """
        cdef PetscInt    ival = asInt(index)
        cdef PetscScalar sval = asScalar(value)
        cdef PetscInsertMode caddv = insertmode(addv)
        # Single-entry write using the local-to-global mapping.
        CHKERR(VecSetValuesLocal(self.vec, 1, &ival, &sval, caddv))
2964
    def setValuesLocal(
        self,
        indices: Sequence[int],
        values: Sequence[Scalar],
        addv: InsertModeSpec = None) -> None:
        """Insert or add multiple values in the vector with a local numbering.

        Not collective.

        Parameters
        ----------
        indices
            Locations to write to.
        values
            Values to insert at ``indices``.
        addv
            Insertion mode.

        Notes
        -----
        The values may be cached so `assemblyBegin` and `assemblyEnd`
        must be called after all calls of this method are completed.

        Multiple calls to `setValuesLocal` cannot be made with different
        values for ``addv`` without intermediate calls to `assemblyBegin`
        and `assemblyEnd`.

        See Also
        --------
        setValues, petsc.VecSetValuesLocal

        """
        # Trailing flags select the non-blocked, local-numbering variant.
        vecsetvalues(self.vec, indices, values, addv, 0, 1)
2998
2999    def setValuesBlockedLocal(
3000        self,
3001        indices: Sequence[int],
3002        values: Sequence[Scalar],
3003        addv: InsertModeSpec = None) -> None:
3004        """Insert or add blocks of values in the vector with a local numbering.
3005
3006        Not collective.
3007
3008        Equivalent to ``x[bs*indices[i]+j] = y[bs*i+j]`` for
3009        ``0 <= i < len(indices)``, ``0 <= j < bs`` and ``bs`` `block_size`.
3010
3011        Parameters
3012        ----------
3013        indices
3014            Local block indices to write to.
3015        values
3016            Values to insert at ``indices``. Should have length
3017            ``len(indices) * vec.block_size``.
3018        addv
3019            Insertion mode.
3020
3021        Notes
3022        -----
3023        The values may be cached so `assemblyBegin` and `assemblyEnd`
3024        must be called after all calls of this method are completed.
3025
3026        Multiple calls to `setValuesBlockedLocal` cannot be made with
3027        different values for ``addv`` without intermediate calls to
3028        `assemblyBegin` and `assemblyEnd`.
3029
3030        See Also
3031        --------
3032        setValuesBlocked, setValuesLocal, petsc.VecSetValuesBlockedLocal
3033
3034        """
3035        vecsetvalues(self.vec, indices, values, addv, 1, 1)
3036
3037    def assemblyBegin(self) -> None:
3038        """Begin an assembling stage of the vector.
3039
3040        Collective.
3041
3042        See Also
3043        --------
3044        assemblyEnd, petsc.VecAssemblyBegin
3045
3046        """
3047        CHKERR(VecAssemblyBegin(self.vec))
3048
3049    def assemblyEnd(self) -> None:
3050        """Finish the assembling stage initiated with `assemblyBegin`.
3051
3052        Collective.
3053
3054        See Also
3055        --------
3056        assemblyBegin, petsc.VecAssemblyEnd
3057
3058        """
3059        CHKERR(VecAssemblyEnd(self.vec))
3060
3061    def assemble(self) -> None:
3062        """Assemble the vector.
3063
3064        Collective.
3065
3066        See Also
3067        --------
3068        assemblyBegin, assemblyEnd
3069
3070        """
3071        CHKERR(VecAssemblyBegin(self.vec))
3072        CHKERR(VecAssemblyEnd(self.vec))
3073
3074    # --- methods for strided vectors ---
3075
3076    def strideScale(self, field: int, alpha: Scalar) -> None:
3077        """Scale a component of the vector.
3078
3079        Logically collective.
3080
3081        Parameters
3082        ----------
3083        field
3084            Component index. Must be between ``0`` and ``vec.block_size``.
3085        alpha
3086            Factor to multiple the component entries by.
3087
3088        See Also
3089        --------
3090        strideSum, strideMin, strideMax, petsc.VecStrideScale
3091
3092        """
3093        cdef PetscInt    ival = asInt(field)
3094        cdef PetscScalar sval = asScalar(alpha)
3095        CHKERR(VecStrideScale(self.vec, ival, sval))
3096
3097    def strideSum(self, field: int) -> Scalar:
3098        """Sum subvector entries.
3099
3100        Collective.
3101
3102        Equivalent to ``sum(x[field], x[field+bs], x[field+2*bs], ...)`` where
3103        ``bs`` is `block_size`.
3104
3105        Parameters
3106        ----------
3107        field
3108            Component index. Must be between ``0`` and ``vec.block_size``.
3109
3110        See Also
3111        --------
3112        strideScale, strideMin, strideMax, petsc.VecStrideSum
3113
3114        """
3115        cdef PetscInt    ival = asInt(field)
3116        cdef PetscScalar sval = 0
3117        CHKERR(VecStrideSum(self.vec, ival, &sval))
3118        return toScalar(sval)
3119
3120    def strideMin(self, field: int) -> tuple[int, float]:
3121        """Return the minimum of entries in a subvector.
3122
3123        Collective.
3124
3125        Equivalent to ``min(x[field], x[field+bs], x[field+2*bs], ...)`` where
3126        ``bs`` is `block_size`.
3127
3128        Parameters
3129        ----------
3130        field
3131            Component index. Must be between ``0`` and ``vec.block_size``.
3132
3133        Returns
3134        -------
3135        int
3136            Location of minimum.
3137        float
3138            Minimum value.
3139
3140        See Also
3141        --------
3142        strideScale, strideSum, strideMax, petsc.VecStrideMin
3143
3144        """
3145        cdef PetscInt  ival1 = asInt(field)
3146        cdef PetscInt  ival2 = 0
3147        cdef PetscReal rval  = 0
3148        CHKERR(VecStrideMin(self.vec, ival1, &ival2, &rval))
3149        return (toInt(ival2), toReal(rval))
3150
3151    def strideMax(self, field: int) -> tuple[int, float]:
3152        """Return the maximum of entries in a subvector.
3153
3154        Collective.
3155
3156        Equivalent to ``max(x[field], x[field+bs], x[field+2*bs], ...)`` where
3157        ``bs`` is `block_size`.
3158
3159        Parameters
3160        ----------
3161        field
3162            Component index. Must be between ``0`` and ``vec.block_size``.
3163
3164        Returns
3165        -------
3166        int
3167            Location of maximum.
3168        float
3169            Maximum value.
3170
3171        See Also
3172        --------
3173        strideScale, strideSum, strideMin, petsc.VecStrideMax
3174
3175        """
3176        cdef PetscInt  ival1 = asInt(field)
3177        cdef PetscInt  ival2 = 0
3178        cdef PetscReal rval  = 0
3179        CHKERR(VecStrideMax(self.vec, ival1, &ival2, &rval))
3180        return (toInt(ival2), toReal(rval))
3181
3182    def strideNorm(
3183        self,
3184        field: int,
3185        norm_type: NormTypeSpec = None) -> float | tuple[float, float]:
3186        """Return the norm of entries in a subvector.
3187
3188        Collective.
3189
3190        Equivalent to ``norm(x[field], x[field+bs], x[field+2*bs], ...)`` where
3191        ``bs`` is `block_size`.
3192
3193        Parameters
3194        ----------
3195        field
3196            Component index. Must be between ``0`` and ``vec.block_size``.
3197        norm_type
3198            The norm type.
3199
3200        See Also
3201        --------
3202        norm, strideScale, strideSum, petsc.VecStrideNorm
3203
3204        """
3205        cdef PetscInt ival = asInt(field)
3206        cdef PetscNormType norm_1_2 = PETSC_NORM_1_AND_2
3207        cdef PetscNormType ntype = PETSC_NORM_2
3208        if norm_type is not None: ntype = norm_type
3209        cdef PetscReal rval[2]
3210        CHKERR(VecStrideNorm(self.vec, ival, ntype, rval))
3211        if ntype != norm_1_2: return toReal(rval[0])
3212        else: return (toReal(rval[0]), toReal(rval[1]))
3213
3214    def strideScatter(
3215        self,
3216        field: int,
3217        Vec vec,
3218        addv: InsertModeSpec = None) -> None:
3219        """Scatter entries into a component of another vector.
3220
3221        Collective.
3222
3223        The current vector is expected to be single-component
3224        (`block_size` of ``1``) and the target vector is expected to be
3225        multi-component.
3226
3227        Parameters
3228        ----------
3229        field
3230            Component index. Must be between ``0`` and ``vec.block_size``.
3231        vec
3232            Multi-component vector to be scattered into.
3233        addv
3234            Insertion mode.
3235
3236        See Also
3237        --------
3238        strideGather, petsc.VecStrideScatter
3239
3240        """
3241        cdef PetscInt ival = asInt(field)
3242        cdef PetscInsertMode caddv = insertmode(addv)
3243        CHKERR(VecStrideScatter(self.vec, ival, vec.vec, caddv))
3244
    def strideGather(
        self,
        field: int,
        Vec vec,
        addv: InsertModeSpec = None) -> None:
        """Insert component values into a single-component vector.

        Collective.

        The current vector is expected to be multi-component (`block_size`
        greater than ``1``) and the target vector is expected to be
        single-component.

        Parameters
        ----------
        field
            Component index. Must be between ``0`` and ``vec.block_size``.
        vec
            Single-component vector to be inserted into.
        addv
            Insertion mode.

        See Also
        --------
        strideScatter, petsc.VecStrideGather

        """
        cdef PetscInt ival = asInt(field)
        cdef PetscInsertMode caddv = insertmode(addv)
        CHKERR(VecStrideGather(self.vec, ival, vec.vec, caddv))
3275
3276    # --- methods for vectors with ghost values ---
3277
3278    def localForm(self) -> Any:
3279        """Return a context manager for viewing ghost vectors in local form.
3280
3281        Logically collective.
3282
3283        Returns
3284        -------
3285        typing.Any
3286            Context manager yielding the vector in local (ghosted) form.
3287
3288        Notes
3289        -----
3290        This operation does not perform a copy. To obtain up-to-date ghost
3291        values `ghostUpdateBegin` and `ghostUpdateEnd` must be called
3292        first.
3293
3294        Non-ghost values can be found
3295        at ``values[0:nlocal]`` and ghost values at
3296        ``values[nlocal:nlocal+nghost]``.
3297
3298        Examples
3299        --------
3300        >>> with vec.localForm() as lf:
3301        ...     # compute with lf
3302
3303        See Also
3304        --------
3305        createGhost, ghostUpdateBegin, ghostUpdateEnd
3306        petsc.VecGhostGetLocalForm, petsc.VecGhostRestoreLocalForm
3307
3308        """
3309        return _Vec_LocalForm(self)
3310
3311    def ghostUpdateBegin(
3312        self,
3313        addv: InsertModeSpec = None,
3314        mode: ScatterModeSpec = None) -> None:
3315        """Begin updating ghosted vector entries.
3316
3317        Neighborwise collective.
3318
3319        See Also
3320        --------
3321        ghostUpdateEnd, ghostUpdate, createGhost, petsc.VecGhostUpdateBegin
3322
3323        """
3324        cdef PetscInsertMode  caddv = insertmode(addv)
3325        cdef PetscScatterMode csctm = scattermode(mode)
3326        CHKERR(VecGhostUpdateBegin(self.vec, caddv, csctm))
3327
3328    def ghostUpdateEnd(
3329        self,
3330        addv: InsertModeSpec = None,
3331        mode: ScatterModeSpec = None) -> None:
3332        """Finish updating ghosted vector entries initiated with `ghostUpdateBegin`.
3333
3334        Neighborwise collective.
3335
3336        See Also
3337        --------
3338        ghostUpdateBegin, ghostUpdate, createGhost, petsc.VecGhostUpdateEnd
3339
3340        """
3341        cdef PetscInsertMode  caddv = insertmode(addv)
3342        cdef PetscScatterMode csctm = scattermode(mode)
3343        CHKERR(VecGhostUpdateEnd(self.vec, caddv, csctm))
3344
3345    def ghostUpdate(
3346        self,
3347        addv: InsertModeSpec = None,
3348        mode: ScatterModeSpec = None) -> None:
3349        """Update ghosted vector entries.
3350
3351        Neighborwise collective.
3352
3353        Parameters
3354        ----------
3355        addv
3356            Insertion mode.
3357        mode
3358            Scatter mode.
3359
3360        Examples
3361        --------
3362        To accumulate ghost region values onto owning processes:
3363
3364        >>> vec.ghostUpdate(InsertMode.ADD_VALUES, ScatterMode.REVERSE)
3365
3366        Update ghost regions:
3367
3368        >>> vec.ghostUpdate(InsertMode.INSERT_VALUES, ScatterMode.FORWARD)
3369
3370        See Also
3371        --------
3372        ghostUpdateBegin, ghostUpdateEnd
3373
3374        """
3375        cdef PetscInsertMode  caddv = insertmode(addv)
3376        cdef PetscScatterMode csctm = scattermode(mode)
3377        CHKERR(VecGhostUpdateBegin(self.vec, caddv, csctm))
3378        CHKERR(VecGhostUpdateEnd(self.vec, caddv, csctm))
3379
3380    def setMPIGhost(self, ghosts: Sequence[int]) -> None:
3381        """Set the ghost points for a ghosted vector.
3382
3383        Collective.
3384
3385        Parameters
3386        ----------
3387        ghosts
3388            Global indices of ghost points.
3389
3390        See Also
3391        --------
3392        createGhost
3393
3394        """
3395        cdef PetscInt ng=0, *ig=NULL
3396        ghosts = iarray_i(ghosts, &ng, &ig)
3397        CHKERR(VecMPISetGhost(self.vec, ng, ig))
3398
3399    def getGhostIS(self) -> IS:
3400        """Return ghosting indices of a ghost vector.
3401
3402        Collective.
3403
3404        Returns
3405        -------
3406        IS
3407            Indices of ghosts.
3408
3409        See Also
3410        --------
3411        petsc.VecGhostGetGhostIS
3412
3413        """
3414        cdef PetscIS indices = NULL
3415        CHKERR(VecGhostGetGhostIS(self.vec, &indices))
3416        return ref_IS(indices)
3417
3418    #
3419
    def getSubVector(self, IS iset, Vec subvec=None) -> Vec:
        """Return a subvector from given indices.

        Collective.

        Once finished with the subvector it should be returned with
        `restoreSubVector`.

        Parameters
        ----------
        iset
            Index set describing which indices to extract into the subvector.
        subvec
            Subvector to copy entries into. If `None` then a new `Vec` will
            be created.

        Returns
        -------
        Vec
            The subvector (``subvec`` if it was provided).

        See Also
        --------
        restoreSubVector, petsc.VecGetSubVector

        """
        if subvec is None: subvec = Vec()
        # release any vector previously held by the wrapper so its
        # handle can be overwritten by VecGetSubVector below
        else: CHKERR(VecDestroy(&subvec.vec))
        CHKERR(VecGetSubVector(self.vec, iset.iset, &subvec.vec))
        return subvec
3445
3446    def restoreSubVector(self, IS iset, Vec subvec) -> None:
3447        """Restore a subvector extracted using `getSubVector`.
3448
3449        Collective.
3450
3451        Parameters
3452        ----------
3453        iset
3454            Index set describing the indices represented by the subvector.
3455        subvec
3456            Subvector to be restored.
3457
3458        See Also
3459        --------
3460        getSubVector, petsc.VecRestoreSubVector
3461
3462        """
3463        CHKERR(VecRestoreSubVector(self.vec, iset.iset, &subvec.vec))
3464
3465    def getNestSubVecs(self) -> list[Vec]:
3466        """Return all the vectors contained in the nested vector.
3467
3468        Not collective.
3469
3470        See Also
3471        --------
3472        setNestSubVecs, petsc.VecNestGetSubVecs
3473
3474        """
3475        cdef PetscInt N=0
3476        cdef PetscVec* sx=NULL
3477        CHKERR(VecNestGetSubVecs(self.vec, &N, &sx))
3478        output = []
3479        for i in range(N):
3480            pyvec = Vec()
3481            pyvec.vec = sx[i]
3482            CHKERR(PetscObjectReference(<PetscObject> pyvec.vec))
3483            output.append(pyvec)
3484
3485        return output
3486
    def setNestSubVecs(
        self,
        sx: Sequence[Vec],
        idxm: Sequence[int] | None = None) -> None:
        """Set the component vectors at specified indices in the nested vector.

        Not collective.

        Parameters
        ----------
        sx
            Array of component vectors.
        idxm
            Indices of the component vectors, defaults to ``range(len(sx))``.

        See Also
        --------
        getNestSubVecs, petsc.VecNestSetSubVecs

        """
        if idxm is None: idxm = range(len(sx))
        else: assert len(idxm) == len(sx)
        cdef PetscInt N = 0
        cdef PetscInt* cidxm = NULL
        # keep the index array object alive while the C pointer is in use
        idxm = iarray_i(idxm, &N, &cidxm)

        # allocate a temporary C array of vector handles; `unused` keeps
        # the backing buffer alive for the duration of the call
        cdef PetscVec* csx = NULL
        cdef object unused = oarray_p(empty_p(N), NULL, <void**>&csx)
        for i from 0 <= i < N: csx[i] = (<Vec?>sx[i]).vec

        CHKERR(VecNestSetSubVecs(self.vec, N, cidxm, csx))
3518
3519    #
3520
3521    def setDM(self, DM dm) -> None:
3522        """Associate a `DM` to the vector.
3523
3524        Not collective.
3525
3526        See Also
3527        --------
3528        getDM, petsc.VecSetDM
3529
3530        """
3531        CHKERR(VecSetDM(self.vec, dm.dm))
3532
3533    def getDM(self) -> DM:
3534        """Return the `DM` associated to the vector.
3535
3536        Not collective.
3537
3538        See Also
3539        --------
3540        setDM, petsc.VecGetDM
3541
3542        """
3543        cdef PetscDM newdm = NULL
3544        CHKERR(VecGetDM(self.vec, &newdm))
3545        cdef DM dm = subtype_DM(newdm)()
3546        dm.dm = newdm
3547        CHKERR(PetscINCREF(dm.obj))
3548        return dm
3549
3550    #
3551
    @classmethod
    def concatenate(cls, vecs: Sequence[Vec]) -> tuple[Vec, list[IS]]:
        """Concatenate vectors into a single vector.

        Collective.

        Parameters
        ----------
        vecs
            The vectors to be concatenated.

        Returns
        -------
        vector_out : Vec
            The concatenated vector.
        indices_list : list of IS
            A list of index sets corresponding to the concatenated components.

        See Also
        --------
        petsc.VecConcatenate

        """
        vecs = list(vecs)
        cdef Py_ssize_t i, m = len(vecs)
        cdef PetscInt n = <PetscInt>m
        cdef PetscVec newvec = NULL
        cdef PetscVec *cvecs  = NULL
        cdef PetscIS  *cisets = NULL
        cdef object unused1
        cdef object vec_index_ises = []
        # temporary C array of input vector handles; `unused1` keeps the
        # backing buffer alive for the duration of the call
        unused1 = oarray_p(empty_p(n), NULL, <void**>&cvecs)
        for i from 0 <= i < m:
            cvecs[i] = (<Vec?>vecs[i]).vec
        CHKERR(VecConcatenate(n, cvecs, &newvec, &cisets))
        cdef Vec self = cls()
        self.vec = newvec
        # transfer ownership of each returned IS to a Python wrapper
        for i from 0 <= i < m:
            temp = IS()
            temp.iset = cisets[i]
            vec_index_ises.append(temp)
        # free only the array of IS handles; the ISes themselves are now
        # owned by the wrappers above
        CHKERR(PetscFree(cisets))
        return self, vec_index_ises
3595
    property sizes:
        """The local and global vector sizes."""
        def __get__(self) -> LayoutSizeSpec:
            return self.getSizes()

        def __set__(self, value):
            # accepts the same layout specification as `setSizes`
            self.setSizes(value)
3603
    property size:
        """The global vector size."""
        # read-only; delegates to getSize()
        def __get__(self) -> int:
            return self.getSize()
3608
    property local_size:
        """The local vector size."""
        # read-only; delegates to getLocalSize()
        def __get__(self) -> int:
            return self.getLocalSize()
3613
    property block_size:
        """The block size."""
        # read-only; delegates to getBlockSize()
        def __get__(self) -> int:
            return self.getBlockSize()
3618
    property owner_range:
        """The locally owned range of indices in the form ``[low, high)``."""
        # read-only; delegates to getOwnershipRange()
        def __get__(self) -> tuple[int, int]:
            return self.getOwnershipRange()
3623
    property owner_ranges:
        """The range of indices owned by each process."""
        # read-only; delegates to getOwnershipRanges()
        def __get__(self) -> ArrayInt:
            return self.getOwnershipRanges()
3628
    property buffer_w:
        """Writeable buffered view of the local portion of the vector."""
        # each access returns a fresh buffer object from getBuffer()
        def __get__(self) -> Any:
            return self.getBuffer()
3633
    property buffer_r:
        """Read-only buffered view of the local portion of the vector."""
        # getBuffer(True) requests read-only access
        def __get__(self) -> Any:
            return self.getBuffer(True)
3638
    property array_w:
        """Writeable `ndarray` containing the local portion of the vector."""
        def __get__(self) -> ArrayScalar:
            return self.getArray()

        def __set__(self, value):
            # write through a buffered view so the vector storage is
            # updated in place and properly restored on release
            cdef buf = self.getBuffer()
            with buf as array: array[:] = value
3647
    property array_r:
        """Read-only `ndarray` containing the local portion of the vector."""
        # getArray(True) requests read-only access
        def __get__(self) -> ArrayScalar:
            return self.getArray(True)
3652
    property buffer:
        """Alias for `buffer_w`."""
        def __get__(self) -> Any:
            return self.buffer_w
3657
    property array:
        """Alias for `array_w`."""
        def __get__(self) -> ArrayScalar:
            return self.array_w

        def __set__(self, value):
            # assignment is forwarded to the writeable-array setter
            self.array_w = value
3665
3666    # --- NumPy array interface (legacy) ---
3667
    property __array_interface__:
        """Legacy NumPy array interface of the local vector buffer."""
        def __get__(self):
            # expose the buffer's own array interface so NumPy can wrap
            # the local vector storage without copying
            cdef buf = self.getBuffer()
            return buf.__array_interface__
3672
3673# --------------------------------------------------------------------
3674
# remove the loose module-level aliases; these classes remain
# available as Vec.Type and Vec.Option
del VecType
del VecOption
3677
3678# --------------------------------------------------------------------
3679