diff --git a/pixi.lock b/pixi.lock
index d7d4911..e2cbc0d 100644
--- a/pixi.lock
+++ b/pixi.lock
@@ -1293,9 +1293,9 @@ packages:
     requires_python: '>=3.8'
 - kind: pypi
   name: pyarrow-stubs
-  version: 10.0.1.9
+  version: '20240828'
   path: .
-  sha256: 5c30ac8c8008518b3a446a57a76cfde327f6fc5b7d4ab9db5deea86294d4b3b2
+  sha256: 94683bcd78fcecd7a11e79fd433e5bc498768bce83e286b25a9b50f6f943b83a
   requires_dist:
   - pyarrow>=17
   requires_python: '>=3.8,<4'
diff --git a/pyarrow-stubs/__lib_pxi/array.pyi b/pyarrow-stubs/__lib_pxi/array.pyi
index 16aed96..5a45018 100644
--- a/pyarrow-stubs/__lib_pxi/array.pyi
+++ b/pyarrow-stubs/__lib_pxi/array.pyi
@@ -1,4 +1,4 @@
-# mypy: disable-error-code="overload-overlap"
+# mypy: disable-error-code="overload-overlap,misc"
 
 import datetime as dt
 
@@ -1189,8 +1189,8 @@ class Array(_PandasConvertible[pd.Series], Generic[_ScalarT]):
     def is_nan(self) -> BooleanArray: ...
     def is_valid(self) -> BooleanArray: ...
     def fill_null(
-        self: Array[Scalar[_BasicDataType[_AsPyType], Any]], fill_value: _AsPyType
-    ) -> Array[Scalar[_BasicDataType[_AsPyType], Any]]: ...
+        self: Array[Scalar[_BasicDataType[_AsPyType]]], fill_value: _AsPyType
+    ) -> Array[Scalar[_BasicDataType[_AsPyType]]]: ...
     @overload
     def __getitem__(self, key: int) -> _ScalarT: ...
     @overload
@@ -1215,7 +1215,7 @@ class Array(_PandasConvertible[pd.Series], Generic[_ScalarT]):
     ) -> scalar.Int64Scalar: ...
     @overload
     def index(
-        self: Array[Scalar[_BasicDataType[_AsPyType], Any]],
+        self: Array[Scalar[_BasicDataType[_AsPyType]]],
         value: _AsPyType,
         start: int | None = None,
         end: int | None = None,
@@ -1226,7 +1226,7 @@ class Array(_PandasConvertible[pd.Series], Generic[_ScalarT]):
     def __array__(self, dtype: np.dtype | None = None, copy: bool | None = None) -> np.ndarray: ...
     def to_numpy(self, zero_copy_only: bool = True, writable: bool = False) -> np.ndarray: ...
     def to_pylist(
-        self: Array[Scalar[_BasicDataType[_AsPyType], Any]],
+        self: Array[Scalar[_BasicDataType[_AsPyType]]],
     ) -> list[_AsPyType | None]: ...
     tolist = to_pylist
     def validate(self, *, full: bool = False) -> None: ...
diff --git a/pyarrow-stubs/__lib_pxi/scalar.pyi b/pyarrow-stubs/__lib_pxi/scalar.pyi
index 12fafd6..2b8babc 100644
--- a/pyarrow-stubs/__lib_pxi/scalar.pyi
+++ b/pyarrow-stubs/__lib_pxi/scalar.pyi
@@ -3,7 +3,7 @@ import collections.abc
 import datetime as dt
 
 from decimal import Decimal
-from typing import Any, Generic, Iterator, Literal, Mapping, Self, TypeAlias, overload
+from typing import Any, Generic, Iterator, Mapping, Self, TypeAlias, overload
 
 import numpy as np
 
@@ -12,17 +12,24 @@ from pyarrow.lib import Array, Buffer, MemoryPool, MonthDayNano, Tensor, _Weakre
 from typing_extensions import TypeVar
 
 from . import types
-from .types import _AsPyType, _DataTypeT, _NewDataTypeT, _Time32Unit, _Time64Unit, _Tz, _Unit
+from .types import (
+    _AsPyType,
+    _DataTypeT,
+    _NewDataTypeT,
+    _Time32Unit,
+    _Time64Unit,
+    _Tz,
+    _Unit,
+)
 
-_IsValid = TypeVar("_IsValid", default=Literal[True])
 _AsPyTypeK = TypeVar("_AsPyTypeK")
 _AsPyTypeV = TypeVar("_AsPyTypeV")
 
-class Scalar(_Weakrefable, Generic[_DataTypeT, _IsValid]):
+class Scalar(_Weakrefable, Generic[_DataTypeT]):
     @property
     def type(self) -> _DataTypeT: ...
     @property
-    def is_valid(self) -> _IsValid: ...
+    def is_valid(self) -> bool: ...
     @overload
     def cast(
         self,
@@ -38,15 +45,15 @@ class Scalar(_Weakrefable, Generic[_DataTypeT, _IsValid]):
         safe: bool = True,
         options: CastOptions | None = None,
         memory_pool: MemoryPool | None = None,
-    ) -> Scalar[_NewDataTypeT, _IsValid]: ...
+    ) -> Scalar[_NewDataTypeT]: ...
     def validate(self, *, full: bool = False) -> None: ...
     def equals(self, other: Scalar) -> bool: ...
     def __hash__(self) -> int: ...
     @overload
-    def as_py(self: Scalar[types._BasicDataType[_AsPyType], Literal[True]]) -> _AsPyType: ...
+    def as_py(self: Scalar[types._BasicDataType[_AsPyType]]) -> _AsPyType: ...
     @overload
     def as_py(
-        self: Scalar[types.ListType[types._BasicDataType[_AsPyType]], Literal[True]],
+        self: Scalar[types.ListType[types._BasicDataType[_AsPyType]]],
     ) -> list[_AsPyType]: ...
     @overload
     def as_py(
@@ -55,216 +62,193 @@ class Scalar(_Weakrefable, Generic[_DataTypeT, _IsValid]):
                 types.DictionaryType[
                     types._BasicDataType[_AsPyTypeK], types._BasicDataType[_AsPyTypeV], Any
                 ]
-            ],
-            Literal[True],
+            ]
         ],
     ) -> list[dict[_AsPyTypeK, _AsPyTypeV]]: ...
     @overload
     def as_py(
         self: Scalar[
             types.ListType[types.DictionaryType[Any, types._BasicDataType[_AsPyTypeV], Any]],
-            Literal[True],
         ],
     ) -> list[dict[Any, _AsPyTypeV]]: ...
     @overload
     def as_py(
         self: Scalar[
             types.ListType[types.DictionaryType[types._BasicDataType[_AsPyTypeK], Any, Any]],
-            Literal[True],
         ],
     ) -> list[dict[_AsPyTypeK, Any]]: ...
     @overload
     def as_py(
-        self: Scalar[types.StructType, Literal[True]],
+        self: Scalar[types.StructType],
     ) -> list[dict[str, Any]]: ...
     @overload
     def as_py(
         self: Scalar[
-            types.MapType[types._BasicDataType[_AsPyTypeK], types._BasicDataType[_AsPyTypeV]],
-            Literal[True],
+            types.MapType[types._BasicDataType[_AsPyTypeK], types._BasicDataType[_AsPyTypeV]]
         ],
     ) -> list[tuple[_AsPyTypeK, _AsPyTypeV]]: ...
     @overload
     def as_py(
-        self: Scalar[
-            types.MapType[Any, types._BasicDataType[_AsPyTypeV]],
-            Literal[True],
-        ],
+        self: Scalar[types.MapType[Any, types._BasicDataType[_AsPyTypeV]]],
     ) -> list[tuple[Any, _AsPyTypeV]]: ...
     @overload
     def as_py(
-        self: Scalar[
-            types.MapType[types._BasicDataType[_AsPyTypeK], Any],
-            Literal[True],
-        ],
+        self: Scalar[types.MapType[types._BasicDataType[_AsPyTypeK], Any]],
     ) -> list[tuple[_AsPyTypeK, Any]]: ...
     @overload
-    def as_py(self: Scalar[Any, Literal[True]]) -> Any: ...
-    @overload
-    def as_py(self: Scalar[Any, Literal[False]]) -> None: ...
+    def as_py(self: Scalar[Any]) -> Any: ...
 
 _NULL: TypeAlias = None
 NA = _NULL
 
-class NullScalar(Scalar[types.NullType, _IsValid]): ...
-class BooleanScalar(Scalar[types.BoolType, _IsValid]): ...
-class UInt8Scalar(Scalar[types.Uint8Type, _IsValid]): ...
-class Int8Scalar(Scalar[types.Int8Type, _IsValid]): ...
-class UInt16Scalar(Scalar[types.Uint16Type, _IsValid]): ...
-class Int16Scalar(Scalar[types.Int16Type, _IsValid]): ...
-class UInt32Scalar(Scalar[types.Uint32Type, _IsValid]): ...
-class Int32Scalar(Scalar[types.Int32Type, _IsValid]): ...
-class UInt64Scalar(Scalar[types.Uint64Type, _IsValid]): ...
-class Int64Scalar(Scalar[types.Int64Type, _IsValid]): ...
-class HalfFloatScalar(Scalar[types.Float16Type, _IsValid]): ...
-class FloatScalar(Scalar[types.Float32Type, _IsValid]): ...
-class DoubleScalar(Scalar[types.Float64Type, _IsValid]): ...
-class Decimal128Scalar(Scalar[types.Decimal128Type, _IsValid]): ...
-class Decimal256Scalar(Scalar[types.Decimal256Type, _IsValid]): ...
-class Date32Scalar(Scalar[types.Date32Type, _IsValid]): ...
-
-class Date64Scalar(Scalar[types.Date64Type, _IsValid]):
+class NullScalar(Scalar[types.NullType]): ...
+class BooleanScalar(Scalar[types.BoolType]): ...
+class UInt8Scalar(Scalar[types.Uint8Type]): ...
+class Int8Scalar(Scalar[types.Int8Type]): ...
+class UInt16Scalar(Scalar[types.Uint16Type]): ...
+class Int16Scalar(Scalar[types.Int16Type]): ...
+class UInt32Scalar(Scalar[types.Uint32Type]): ...
+class Int32Scalar(Scalar[types.Int32Type]): ...
+class UInt64Scalar(Scalar[types.Uint64Type]): ...
+class Int64Scalar(Scalar[types.Int64Type]): ...
+class HalfFloatScalar(Scalar[types.Float16Type]): ...
+class FloatScalar(Scalar[types.Float32Type]): ...
+class DoubleScalar(Scalar[types.Float64Type]): ...
+class Decimal128Scalar(Scalar[types.Decimal128Type]): ...
+class Decimal256Scalar(Scalar[types.Decimal256Type]): ...
+class Date32Scalar(Scalar[types.Date32Type]): ...
+
+class Date64Scalar(Scalar[types.Date64Type]):
     @property
     def value(self) -> dt.date | None: ...
 
-class Time32Scalar(Scalar[types.Time32Type[_Time32Unit], _IsValid]):
+class Time32Scalar(Scalar[types.Time32Type[_Time32Unit]]):
     @property
     def value(self) -> dt.time | None: ...
 
-class Time64Scalar(Scalar[types.Time64Type[_Time64Unit], _IsValid]):
+class Time64Scalar(Scalar[types.Time64Type[_Time64Unit]]):
     @property
     def value(self) -> dt.time | None: ...
 
-class TimestampScalar(Scalar[types.TimestampType[_Unit, _Tz], _IsValid]):
+class TimestampScalar(Scalar[types.TimestampType[_Unit, _Tz]]):
     @property
     def value(self) -> int | None: ...
 
-class DurationScalar(Scalar[types.DurationType[_Unit], _IsValid]):
+class DurationScalar(Scalar[types.DurationType[_Unit]]):
     @property
     def value(self) -> dt.timedelta | None: ...
 
-class MonthDayNanoIntervalScalar(Scalar[types.MonthDayNanoIntervalType, _IsValid]):
+class MonthDayNanoIntervalScalar(Scalar[types.MonthDayNanoIntervalType]):
     @property
     def value(self) -> MonthDayNano | None: ...
 
-class BinaryScalar(Scalar[types.BinaryType, _IsValid]):
+class BinaryScalar(Scalar[types.BinaryType]):
     def as_buffer(self) -> Buffer: ...
 
-class LargeBinaryScalar(Scalar[types.LargeBinaryType, _IsValid]):
+class LargeBinaryScalar(Scalar[types.LargeBinaryType]):
    def as_buffer(self) -> Buffer: ...
 
-class FixedSizeBinaryScalar(Scalar[types.FixedSizeBinaryType, _IsValid]):
+class FixedSizeBinaryScalar(Scalar[types.FixedSizeBinaryType]):
     def as_buffer(self) -> Buffer: ...
 
-class StringScalar(Scalar[types.StringType, _IsValid]):
+class StringScalar(Scalar[types.StringType]):
     def as_buffer(self) -> Buffer: ...
 
-class LargeStringScalar(Scalar[types.LargeStringType, _IsValid]):
+class LargeStringScalar(Scalar[types.LargeStringType]):
     def as_buffer(self) -> Buffer: ...
 
-class BinaryViewScalar(Scalar[types.BinaryViewType, _IsValid]):
+class BinaryViewScalar(Scalar[types.BinaryViewType]):
     def as_buffer(self) -> Buffer: ...
 
-class StringViewScalar(Scalar[types.StringViewType, _IsValid]):
+class StringViewScalar(Scalar[types.StringViewType]):
     def as_buffer(self) -> Buffer: ...
 
-class ListScalar(Scalar[types.ListType[_DataTypeT], _IsValid]):
+class ListScalar(Scalar[types.ListType[_DataTypeT]]):
     @property
     def values(self) -> Array | None: ...
     def __len__(self) -> int: ...
-    def __getitem__(self, i: int) -> Scalar[_DataTypeT, _IsValid]: ...
+    def __getitem__(self, i: int) -> Scalar[_DataTypeT]: ...
     def __iter__(self) -> Iterator[Array]: ...
 
-class FixedSizeListScalar(Scalar[types.FixedSizeListType[_DataTypeT, types._Size], _IsValid]):
+class FixedSizeListScalar(Scalar[types.FixedSizeListType[_DataTypeT, types._Size]]):
     @property
     def values(self) -> Array | None: ...
     def __len__(self) -> int: ...
-    def __getitem__(self, i: int) -> Scalar[_DataTypeT, _IsValid]: ...
+    def __getitem__(self, i: int) -> Scalar[_DataTypeT]: ...
     def __iter__(self) -> Iterator[Array]: ...
 
-class LargeListScalar(Scalar[types.LargeListType[_DataTypeT], _IsValid]):
+class LargeListScalar(Scalar[types.LargeListType[_DataTypeT]]):
     @property
     def values(self) -> Array | None: ...
     def __len__(self) -> int: ...
-    def __getitem__(self, i: int) -> Scalar[_DataTypeT, _IsValid]: ...
+    def __getitem__(self, i: int) -> Scalar[_DataTypeT]: ...
     def __iter__(self) -> Iterator[Array]: ...
 
-class ListViewScalar(Scalar[types.ListViewType[_DataTypeT], _IsValid]):
+class ListViewScalar(Scalar[types.ListViewType[_DataTypeT]]):
     @property
     def values(self) -> Array | None: ...
     def __len__(self) -> int: ...
-    def __getitem__(self, i: int) -> Scalar[_DataTypeT, _IsValid]: ...
+    def __getitem__(self, i: int) -> Scalar[_DataTypeT]: ...
     def __iter__(self) -> Iterator[Array]: ...
 
-class LargeListViewScalar(Scalar[types.LargeListViewType[_DataTypeT], _IsValid]):
+class LargeListViewScalar(Scalar[types.LargeListViewType[_DataTypeT]]):
     @property
     def values(self) -> Array | None: ...
     def __len__(self) -> int: ...
-    def __getitem__(self, i: int) -> Scalar[_DataTypeT, _IsValid]: ...
+    def __getitem__(self, i: int) -> Scalar[_DataTypeT]: ...
     def __iter__(self) -> Iterator[Array]: ...
 
-class StructScalar(Scalar[types.StructType, _IsValid], collections.abc.Mapping[str, Scalar]):
+class StructScalar(Scalar[types.StructType], collections.abc.Mapping[str, Scalar]):
     def __len__(self) -> int: ...
     def __iter__(self) -> Iterator[str]: ...
-    def __getitem__(self, __key: str) -> Scalar[Any, _IsValid]: ...  # type: ignore[override]
+    def __getitem__(self, __key: str) -> Scalar[Any]: ...  # type: ignore[override]
     def _as_py_tuple(self) -> list[tuple[str, Any]]: ...
 
-class MapScalar(Scalar[types.MapType[types._K, types._ValueT], _IsValid]):
+class MapScalar(Scalar[types.MapType[types._K, types._ValueT]]):
     @property
     def values(self) -> Array | None: ...
     def __len__(self) -> int: ...
-    def __getitem__(
-        self, i: int
-    ) -> tuple[Scalar[types._K, _IsValid], types._ValueT, Any, _IsValid]: ...
+    def __getitem__(self, i: int) -> tuple[Scalar[types._K], types._ValueT, Any]: ...
     @overload
     def __iter__(
         self: Scalar[
-            types.MapType[types._BasicDataType[_AsPyTypeK], types._BasicDataType[_AsPyTypeV]],
-            _IsValid,
+            types.MapType[types._BasicDataType[_AsPyTypeK], types._BasicDataType[_AsPyTypeV]]
         ],
     ) -> Iterator[tuple[_AsPyTypeK, _AsPyTypeV]]: ...
     @overload
     def __iter__(
-        self: Scalar[
-            types.MapType[Any, types._BasicDataType[_AsPyTypeV]],
-            _IsValid,
-        ],
+        self: Scalar[types.MapType[Any, types._BasicDataType[_AsPyTypeV]],],
     ) -> Iterator[tuple[Any, _AsPyTypeV]]: ...
     @overload
     def __iter__(
-        self: Scalar[
-            types.MapType[types._BasicDataType[_AsPyTypeK], Any],
-            _IsValid,
-        ],
+        self: Scalar[types.MapType[types._BasicDataType[_AsPyTypeK], Any],],
     ) -> Iterator[tuple[_AsPyTypeK, Any]]: ...
 
-class DictionaryScalar(Scalar[types.DictionaryType[types._IndexT, types._ValueT], _IsValid]):
+class DictionaryScalar(Scalar[types.DictionaryType[types._IndexT, types._ValueT]]):
     @property
-    def index(self) -> Scalar[types._IndexT, _IsValid]: ...
+    def index(self) -> Scalar[types._IndexT]: ...
     @property
-    def value(self) -> Scalar[types._ValueT, _IsValid]: ...
+    def value(self) -> Scalar[types._ValueT]: ...
     @property
     def dictionary(self) -> Array: ...
 
-class RunEndEncodedScalar(
-    Scalar[types.RunEndEncodedType[types._RunEndType, types._ValueT], _IsValid]
-):
+class RunEndEncodedScalar(Scalar[types.RunEndEncodedType[types._RunEndType, types._ValueT]]):
     @property
     def value(self) -> tuple[int, int] | None: ...
 
-class UnionScalar(Scalar[types.UnionType, _IsValid]):
+class UnionScalar(Scalar[types.UnionType]):
     @property
     def value(self) -> Any | None: ...
     @property
     def type_code(self) -> str: ...
 
-class ExtensionScalar(Scalar[types.ExtensionType, _IsValid]):
+class ExtensionScalar(Scalar[types.ExtensionType]):
     @property
     def value(self) -> Any | None: ...
     @staticmethod
     def from_storage(typ: types.BaseExtensionType, value) -> ExtensionScalar: ...
 
-class FixedShapeTensorScalar(ExtensionScalar[_IsValid]):
+class FixedShapeTensorScalar(ExtensionScalar):
     def to_numpy(self) -> np.ndarray: ...
     def to_tensor(self) -> Tensor: ...
 
@@ -309,8 +293,11 @@ def scalar(
     value: dt.timedelta, *, from_pandas: bool | None = None, memory_pool: MemoryPool | None = None
 ) -> DurationScalar: ...
 @overload
-def scalar(  # type: ignore[overload-overlap]
-    value: MonthDayNano, *, from_pandas: bool | None = None, memory_pool: MemoryPool | None = None
+def scalar(
+    value: MonthDayNano,
+    *,
+    from_pandas: bool | None = None,
+    memory_pool: MemoryPool | None = None,
 ) -> MonthDayNanoIntervalScalar: ...
 @overload
 def scalar(
@@ -391,19 +378,19 @@ def scalar(
 ) -> ListScalar[types.ListType[types.MonthDayNanoIntervalType]]: ...
 @overload
 def scalar(
-    value: CollectionValue[_V],
+    value: CollectionValue,
     *,
     from_pandas: bool | None = None,
     memory_pool: MemoryPool | None = None,
 ) -> ListScalar[Any]: ...
 @overload
 def scalar(
-    value: _V,
+    value: Any,
     type: _DataTypeT,
     *,
     from_pandas: bool | None = None,
     memory_pool: MemoryPool | None = None,
-) -> Scalar[_DataTypeT, _V]: ...
+) -> Scalar[_DataTypeT]: ...
 
 __all__ = [
     "Scalar",
diff --git a/pyarrow-stubs/__lib_pxi/table.pyi b/pyarrow-stubs/__lib_pxi/table.pyi
index 10cc70f..fb8a95d 100644
--- a/pyarrow-stubs/__lib_pxi/table.pyi
+++ b/pyarrow-stubs/__lib_pxi/table.pyi
@@ -100,7 +100,7 @@ class ChunkedArray(_PandasConvertible[pd.Series], Generic[_ScalarT]):
     def filter(self, mask: Mask, null_selection_behavior: NullSelectionBehavior = "drop"): ...
     @overload
     def index(
-        self: ChunkedArray[Scalar[_BasicDataType[_AsPyType], Any]],
+        self: ChunkedArray[Scalar[_BasicDataType[_AsPyType]]],
         value: Scalar[_DataTypeT] | _AsPyType,
         start: int | None = None,
         end: int | None = None,
@@ -128,7 +128,7 @@ class ChunkedArray(_PandasConvertible[pd.Series], Generic[_ScalarT]):
     def iterchunks(self) -> Generator[Array[_ScalarT], None, None]: ...
     def __iter__(self) -> Iterator[Array[_ScalarT]]: ...
     def to_pylist(
-        self: ChunkedArray[Scalar[_BasicDataType[_AsPyType], Any]],
+        self: ChunkedArray[Scalar[_BasicDataType[_AsPyType]]],
     ) -> list[_AsPyType | None]: ...
     def __arrow_c_stream__(self, requested_schema=None) -> Any: ...
     @classmethod
diff --git a/pyproject.toml b/pyproject.toml
index ab1a7fc..f530f58 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -76,3 +76,4 @@ explicit_package_bases = true
 files = "pyarrow-stubs"
 namespace_packages = true
 show_error_codes = true
+disable_error_code = ["overload-overlap", "import-not-found"]
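Not part of the patch itself: a minimal sketch of how the simplified stubs read at a call site, assuming pyarrow >= 17 with these stubs on the type checker's path. Scalar is now generic over its data type only, is_valid is typed as a plain bool, and as_py() resolves to the underlying Python type without a validity parameter.

    # Illustration only (not from the diff); assumes pyarrow >= 17 with these stubs installed.
    import pyarrow as pa

    s = pa.scalar(1)          # an integer scalar; under these stubs a Scalar parameterized by one data type
    valid: bool = s.is_valid  # now a plain bool rather than the removed _IsValid Literal parameter
    value = s.as_py()         # the _BasicDataType[_AsPyType] overload resolves this to int

    # The (value, type) overload of scalar() now returns Scalar[_DataTypeT] instead of Scalar[_DataTypeT, _V].
    typed = pa.scalar(2.5, type=pa.float64())
    print(value, valid, typed.as_py())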