diff --git a/.gitattributes b/.gitattributes
index ba919a1..c1044f6 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -14,7 +14,8 @@ mkdocs.yml export-ignore
 mypy.ini export-ignore
 pyproject.toml export-ignore
 pyrightconfig.json export-ignore
-requirements.txt export-ignore
+requirements-* export-ignore
+requirements.* export-ignore
 scripts/ export-ignore
 stubs/ export-ignore
 tests/ export-ignore
diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
index c8a049f..3d954d6 100644
--- a/.github/workflows/python.yml
+++ b/.github/workflows/python.yml
@@ -26,6 +26,9 @@ jobs:
     name: ${{ matrix.python-version }}
     runs-on: ${{ matrix.os }}
 
+    env:
+      UV_SYSTEM_PYTHON: 1
+
     strategy:
       matrix:
         os: ['ubuntu-latest']
@@ -44,8 +47,8 @@ jobs:
       - name: Install dependencies
         run: |
           python -m pip install -U uv
+          make install-dev
 
       - name: Do linting
         run: |
-          make UV_INSTALL_FLAGS="--system" \
-              install ci-check
+          make ci-check
diff --git a/Makefile b/Makefile
index 2aca929..4415944 100644
--- a/Makefile
+++ b/Makefile
@@ -7,9 +7,18 @@ all:
 install:
 	uv pip install $(UV_INSTALL_FLAGS) -r requirements.txt
 
+.PHONY: install-dev
+install-dev:
+	uv pip install $(UV_INSTALL_FLAGS) -r requirements-dev.txt
+
 .PHONY: pip-compile
 pip-compile:
 	uv pip compile --upgrade requirements.in -o requirements.txt
+	uv pip compile --upgrade requirements-dev.in -o requirements-dev.txt
+
+.PHONY: update-changelog
+update-changelog:
+	git cliff --output=CHANGELOG.md
 
 .PHONY: ci-check
 ci-check:
diff --git a/plugin/commands/select_emoji.py b/plugin/commands/select_emoji.py
index 8f57821..ba68dce 100644
--- a/plugin/commands/select_emoji.py
+++ b/plugin/commands/select_emoji.py
@@ -5,7 +5,7 @@
 import sublime
 import sublime_plugin
 
-from ..constants import DB_GENERATOR_REVISION
+from ..constants import DB_REVISION
 from ..emoji import EmojiDatabase, get_emoji_db
 
 
@@ -31,5 +31,5 @@ def callback(selected: int) -> None:
             if selected >= 0:
                 self.view.run_command("insert", {"characters": db[selected].char})
 
-        db = get_emoji_db(DB_GENERATOR_REVISION)
+        db = get_emoji_db(DB_REVISION)
         window.show_quick_panel(emoji_db_to_quick_panel_items(db), callback)
diff --git a/plugin/constants.py b/plugin/constants.py
index dc40b08..94b06f1 100644
--- a/plugin/constants.py
+++ b/plugin/constants.py
@@ -4,8 +4,10 @@
 
 import sublime
 
+assert __package__
+
 PACKAGE_NAME = __package__.partition(".")[0]
 
 DB_FILE_IN_PACKAGE = f"Packages/{PACKAGE_NAME}/data/emoji-test.txt"
-DB_FILE_CACHED = Path(sublime.cache_path()) / f"{PACKAGE_NAME}/db.json"
-DB_GENERATOR_REVISION = 1
+DB_FILE_CACHED = Path(sublime.cache_path()) / f"{PACKAGE_NAME}/db.bin"
+DB_REVISION = 2
diff --git a/plugin/data_types.py b/plugin/data_types.py
new file mode 100644
index 0000000..72f9d18
--- /dev/null
+++ b/plugin/data_types.py
@@ -0,0 +1,12 @@
+from __future__ import annotations
+
+import sys
+from enum import Enum
+
+if sys.version_info >= (3, 11):
+    from enum import StrEnum
+else:
+
+    class StrEnum(str, Enum):
+        __str__ = str.__str__  # type: ignore
+        __format__ = str.__format__  # type: ignore
diff --git a/plugin/emoji.py b/plugin/emoji.py
index b3818fb..8704c2a 100644
--- a/plugin/emoji.py
+++ b/plugin/emoji.py
@@ -1,34 +1,34 @@
 from __future__ import annotations
 
-import json
+import pickle
 import re
 from collections.abc import Iterable, Iterator, Sequence
 from dataclasses import asdict, dataclass, field
-from enum import Enum
 from functools import lru_cache
 from typing import Any
 
 import sublime
 
-from .constants import DB_FILE_CACHED, DB_FILE_IN_PACKAGE, DB_GENERATOR_REVISION
+from .constants import DB_FILE_CACHED, DB_FILE_IN_PACKAGE, DB_REVISION
+from .data_types import StrEnum
 from .utils import pp_error, pp_info, pp_warning
 
 
 @lru_cache
-def get_emoji_db(generator_revision: int) -> EmojiDatabase:
-    def _create_db_cache() -> EmojiDatabase:
+def get_emoji_db(db_revision: int) -> EmojiDatabase:
+    def _create_cache() -> EmojiDatabase:
         pp_info(f"Create database cache: {DB_FILE_CACHED}")
         db_content = sublime.load_resource(DB_FILE_IN_PACKAGE)
         db = EmojiDatabase.from_content(db_content)
         DB_FILE_CACHED.parent.mkdir(parents=True, exist_ok=True)
-        DB_FILE_CACHED.write_text(db.to_json(), encoding="utf-8")
+        DB_FILE_CACHED.write_bytes(db.to_pickle())
         return db
 
-    def _load_db_cache() -> EmojiDatabase | None:
+    def _load_cache() -> EmojiDatabase | None:
         try:
-            db_dict: dict[str, Any] = json.loads(DB_FILE_CACHED.read_bytes())
+            db_dict: dict[str, Any] = pickle.loads(DB_FILE_CACHED.read_bytes())
             db = EmojiDatabase.from_dict(db_dict)
-            if db.generator_revision == generator_revision:
+            if db.db_revision == db_revision:
                 pp_info(f"Load database cache: {DB_FILE_CACHED}")
                 return db
             pp_warning("Mismatched database cache revision...")
@@ -36,10 +36,10 @@ def _load_db_cache() -> EmojiDatabase | None:
             pp_error(f"Failed to load database cache: {e}")
         return None
 
-    return _load_db_cache() or _create_db_cache()
+    return _load_cache() or _create_cache()
 
 
-class EmojiStatus(str, Enum):
+class EmojiStatus(StrEnum):
     COMPONENT = "component"
     FULLY_QUALIFIED = "fully-qualified"
     MINIMALLY_QUALIFIED = "minimally-qualified"
@@ -118,21 +118,21 @@ def str_to_code_points(s: str) -> list[str]:
 
 @dataclass
 class EmojiDatabase:
+    db_revision: int = 0
     date: str = ""
     version: str = ""
-    emojis: list = field(default_factory=list)
-    generator_revision: int = 0
+    emojis: list[Emoji] = field(default_factory=list)
 
-    _re_version = re.compile(r"^#\s*Version:\s*(?P<version>.*)$")
+    _RE_VERSION = re.compile(r"^#\s*Version:\s*(?P<version>.*)$")
     """Matches `# Version: 15.1`."""
-    _re_date = re.compile(r"^#\s*Date:\s*(?P<date>.*)$")
+    _RE_DATE = re.compile(r"^#\s*Date:\s*(?P<date>.*)$")
     """Matches `# Date: 2023-06-05, 21:39:54 GMT`."""
 
     def __getitem__(self, index: int) -> Emoji:
         return self.emojis[index]
 
     def __hash__(self) -> int:
-        return hash((self.date, self.version, self.generator_revision))
+        return hash((self.db_revision, self.date, self.version))
 
     def __iter__(self) -> Iterator[Emoji]:
         return iter(self.emojis)
@@ -144,9 +144,9 @@ def __str__(self) -> str:
         """Returns a string like "emoji-test.txt"."""
         data = "\n".join(map(str, self.emojis))
         return f"""
+# DB Revision: {self.db_revision}
 # Date: {self.date}
 # Version: {self.version}
-# Generator Version: {self.generator_revision}
 {data}
 # EOF
 """
@@ -158,34 +158,29 @@ def from_content(cls, content: str) -> EmojiDatabase:
     @classmethod
     def from_dict(cls, db: dict[str, Any]) -> EmojiDatabase:
         return cls(
+            db_revision=db.get("db_revision", DB_REVISION),
             date=db.get("date", ""),
             version=db.get("version", ""),
             emojis=list(map(Emoji.from_dict, db.get("emojis", []))),
-            generator_revision=db.get("generator_revision", 0),
         )
 
     @classmethod
     def from_lines(cls, lines: Iterable[str]) -> EmojiDatabase:
-        collection = cls()
+        collection = cls(db_revision=DB_REVISION)
         for line in lines:
-            if e := Emoji.from_line(line):
-                collection.emojis.append(e)
+            if emoji := Emoji.from_line(line):
+                collection.emojis.append(emoji)
                 continue
-            if m := cls._re_version.fullmatch(line):
+            if m := cls._RE_VERSION.fullmatch(line):
                 collection.version = m.group("version").strip()
                 continue
-            if m := cls._re_date.fullmatch(line):
+            if m := cls._RE_DATE.fullmatch(line):
                 collection.date = m.group("date").strip()
                 continue
         return collection
 
-    def to_json(self, *, pretty: bool = False) -> str:
-        if pretty:
-            kwargs: dict[str, Any] = {"indent": "\t"}
-        else:
-            kwargs = {"separators": (",", ":")}
-        return json.dumps(
-            dict(asdict(self), generator_revision=DB_GENERATOR_REVISION),
-            ensure_ascii=False,
-            **kwargs,
-        )
+    def to_dict(self) -> dict[str, Any]:
+        return asdict(self)
+
+    def to_pickle(self) -> bytes:
+        return pickle.dumps(self.to_dict(), protocol=pickle.HIGHEST_PROTOCOL)
diff --git a/plugin/libs/typing_extensions.py b/plugin/libs/typing_extensions.py
deleted file mode 100644
index c96bf90..0000000
--- a/plugin/libs/typing_extensions.py
+++ /dev/null
@@ -1,2892 +0,0 @@
-import abc
-import collections
-import collections.abc
-import functools
-import inspect
-import operator
-import sys
-import types as _types
-import typing
-import warnings
-
-__all__ = [
-    # Super-special typing primitives.
-    'Any',
-    'ClassVar',
-    'Concatenate',
-    'Final',
-    'LiteralString',
-    'ParamSpec',
-    'ParamSpecArgs',
-    'ParamSpecKwargs',
-    'Self',
-    'Type',
-    'TypeVar',
-    'TypeVarTuple',
-    'Unpack',
-
-    # ABCs (from collections.abc).
-    'Awaitable',
-    'AsyncIterator',
-    'AsyncIterable',
-    'Coroutine',
-    'AsyncGenerator',
-    'AsyncContextManager',
-    'Buffer',
-    'ChainMap',
-
-    # Concrete collection types.
-    'ContextManager',
-    'Counter',
-    'Deque',
-    'DefaultDict',
-    'NamedTuple',
-    'OrderedDict',
-    'TypedDict',
-
-    # Structural checks, a.k.a. protocols.
-    'SupportsAbs',
-    'SupportsBytes',
-    'SupportsComplex',
-    'SupportsFloat',
-    'SupportsIndex',
-    'SupportsInt',
-    'SupportsRound',
-
-    # One-off things.
-    'Annotated',
-    'assert_never',
-    'assert_type',
-    'clear_overloads',
-    'dataclass_transform',
-    'deprecated',
-    'Doc',
-    'get_overloads',
-    'final',
-    'get_args',
-    'get_origin',
-    'get_original_bases',
-    'get_protocol_members',
-    'get_type_hints',
-    'IntVar',
-    'is_protocol',
-    'is_typeddict',
-    'Literal',
-    'NewType',
-    'overload',
-    'override',
-    'Protocol',
-    'reveal_type',
-    'runtime',
-    'runtime_checkable',
-    'Text',
-    'TypeAlias',
-    'TypeAliasType',
-    'TypeGuard',
-    'TYPE_CHECKING',
-    'Never',
-    'NoReturn',
-    'Required',
-    'NotRequired',
-
-    # Pure aliases, have always been in typing
-    'AbstractSet',
-    'AnyStr',
-    'BinaryIO',
-    'Callable',
-    'Collection',
-    'Container',
-    'Dict',
-    'ForwardRef',
-    'FrozenSet',
-    'Generator',
-    'Generic',
-    'Hashable',
-    'IO',
-    'ItemsView',
-    'Iterable',
-    'Iterator',
-    'KeysView',
-    'List',
-    'Mapping',
-    'MappingView',
-    'Match',
-    'MutableMapping',
-    'MutableSequence',
-    'MutableSet',
-    'Optional',
-    'Pattern',
-    'Reversible',
-    'Sequence',
-    'Set',
-    'Sized',
-    'TextIO',
-    'Tuple',
-    'Union',
-    'ValuesView',
-    'cast',
-    'no_type_check',
-    'no_type_check_decorator',
-]
-
-# for backward compatibility
-PEP_560 = True
-GenericMeta = type
-
-# The functions below are modified copies of typing internal helpers.
-# They are needed by _ProtocolMeta and they provide support for PEP 646.
-
-
-class _Sentinel:
-    def __repr__(self):
-        return "<sentinel>"
-
-
-_marker = _Sentinel()
-
-
-def _check_generic(cls, parameters, elen=_marker):
-    """Check correct count for parameters of a generic cls (internal helper).
-    This gives a nice error message in case of count mismatch.
- """ - if not elen: - raise TypeError(f"{cls} is not a generic class") - if elen is _marker: - if not hasattr(cls, "__parameters__") or not cls.__parameters__: - raise TypeError(f"{cls} is not a generic class") - elen = len(cls.__parameters__) - alen = len(parameters) - if alen != elen: - if hasattr(cls, "__parameters__"): - parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] - num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) - if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): - return - raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};" - f" actual {alen}, expected {elen}") - - -if sys.version_info >= (3, 10): - def _should_collect_from_parameters(t): - return isinstance( - t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) - ) -elif sys.version_info >= (3, 9): - def _should_collect_from_parameters(t): - return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) -else: - def _should_collect_from_parameters(t): - return isinstance(t, typing._GenericAlias) and not t._special - - -def _collect_type_vars(types, typevar_types=None): - """Collect all type variable contained in types in order of - first appearance (lexicographic order). For example:: - - _collect_type_vars((T, List[S, T])) == (T, S) - """ - if typevar_types is None: - typevar_types = typing.TypeVar - tvars = [] - for t in types: - if ( - isinstance(t, typevar_types) and - t not in tvars and - not _is_unpack(t) - ): - tvars.append(t) - if _should_collect_from_parameters(t): - tvars.extend([t for t in t.__parameters__ if t not in tvars]) - return tuple(tvars) - - -NoReturn = typing.NoReturn - -# Some unconstrained type variables. These are used by the container types. -# (These are not for export.) -T = typing.TypeVar('T') # Any type. -KT = typing.TypeVar('KT') # Key type. -VT = typing.TypeVar('VT') # Value type. -T_co = typing.TypeVar('T_co', covariant=True) # Any type covariant containers. -T_contra = typing.TypeVar('T_contra', contravariant=True) # Ditto contravariant. - - -if sys.version_info >= (3, 11): - from typing import Any -else: - - class _AnyMeta(type): - def __instancecheck__(self, obj): - if self is Any: - raise TypeError("typing_extensions.Any cannot be used with isinstance()") - return super().__instancecheck__(obj) - - def __repr__(self): - if self is Any: - return "typing_extensions.Any" - return super().__repr__() - - class Any(metaclass=_AnyMeta): - """Special type indicating an unconstrained type. - - Any is compatible with every type. - - Any assumed to have all methods. - - All values assumed to be instances of Any. - Note that all the above statements are true from the point of view of - static type checkers. At runtime, Any should not be used with instance - checks. - """ - def __new__(cls, *args, **kwargs): - if cls is Any: - raise TypeError("Any cannot be instantiated") - return super().__new__(cls, *args, **kwargs) - - -ClassVar = typing.ClassVar - - -class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): - def __repr__(self): - return 'typing_extensions.' + self._name - - -Final = typing.Final - -if sys.version_info >= (3, 11): - final = typing.final -else: - # @final exists in 3.8+, but we backport it for all versions - # before 3.11 to keep support for the __final__ attribute. - # See https://bugs.python.org/issue46342 - def final(f): - """This decorator can be used to indicate to type checkers that - the decorated method cannot be overridden, and decorated class - cannot be subclassed. 
For example: - - class Base: - @final - def done(self) -> None: - ... - class Sub(Base): - def done(self) -> None: # Error reported by type checker - ... - @final - class Leaf: - ... - class Other(Leaf): # Error reported by type checker - ... - - There is no runtime checking of these properties. The decorator - sets the ``__final__`` attribute to ``True`` on the decorated object - to allow runtime introspection. - """ - try: - f.__final__ = True - except (AttributeError, TypeError): - # Skip the attribute silently if it is not writable. - # AttributeError happens if the object has __slots__ or a - # read-only property, TypeError if it's a builtin class. - pass - return f - - -def IntVar(name): - return typing.TypeVar(name) - - -# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 -if sys.version_info >= (3, 10, 1): - Literal = typing.Literal -else: - def _flatten_literal_params(parameters): - """An internal helper for Literal creation: flatten Literals among parameters""" - params = [] - for p in parameters: - if isinstance(p, _LiteralGenericAlias): - params.extend(p.__args__) - else: - params.append(p) - return tuple(params) - - def _value_and_type_iter(params): - for p in params: - yield p, type(p) - - class _LiteralGenericAlias(typing._GenericAlias, _root=True): - def __eq__(self, other): - if not isinstance(other, _LiteralGenericAlias): - return NotImplemented - these_args_deduped = set(_value_and_type_iter(self.__args__)) - other_args_deduped = set(_value_and_type_iter(other.__args__)) - return these_args_deduped == other_args_deduped - - def __hash__(self): - return hash(frozenset(_value_and_type_iter(self.__args__))) - - class _LiteralForm(_ExtensionsSpecialForm, _root=True): - def __init__(self, doc: str): - self._name = 'Literal' - self._doc = self.__doc__ = doc - - def __getitem__(self, parameters): - if not isinstance(parameters, tuple): - parameters = (parameters,) - - parameters = _flatten_literal_params(parameters) - - val_type_pairs = list(_value_and_type_iter(parameters)) - try: - deduped_pairs = set(val_type_pairs) - except TypeError: - # unhashable parameters - pass - else: - # similar logic to typing._deduplicate on Python 3.9+ - if len(deduped_pairs) < len(val_type_pairs): - new_parameters = [] - for pair in val_type_pairs: - if pair in deduped_pairs: - new_parameters.append(pair[0]) - deduped_pairs.remove(pair) - assert not deduped_pairs, deduped_pairs - parameters = tuple(new_parameters) - - return _LiteralGenericAlias(self, parameters) - - Literal = _LiteralForm(doc="""\ - A type that can be used to indicate to type checkers - that the corresponding value has a value literally equivalent - to the provided parameter. For example: - - var: Literal[4] = 4 - - The type checker understands that 'var' is literally equal to - the value 4 and no other value. - - Literal[...] cannot be subclassed. There is no runtime - checking verifying that the parameter is actually a value - instead of a type.""") - - -_overload_dummy = typing._overload_dummy - - -if hasattr(typing, "get_overloads"): # 3.11+ - overload = typing.overload - get_overloads = typing.get_overloads - clear_overloads = typing.clear_overloads -else: - # {module: {qualname: {firstlineno: func}}} - _overload_registry = collections.defaultdict( - functools.partial(collections.defaultdict, dict) - ) - - def overload(func): - """Decorator for overloaded functions/methods. - - In a stub file, place two or more stub definitions for the same - function in a row, each decorated with @overload. 
For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - - In a non-stub file (i.e. a regular .py file), do the same but - follow it with an implementation. The implementation should *not* - be decorated with @overload. For example: - - @overload - def utf8(value: None) -> None: ... - @overload - def utf8(value: bytes) -> bytes: ... - @overload - def utf8(value: str) -> bytes: ... - def utf8(value): - # implementation goes here - - The overloads for a function can be retrieved at runtime using the - get_overloads() function. - """ - # classmethod and staticmethod - f = getattr(func, "__func__", func) - try: - _overload_registry[f.__module__][f.__qualname__][ - f.__code__.co_firstlineno - ] = func - except AttributeError: - # Not a normal function; ignore. - pass - return _overload_dummy - - def get_overloads(func): - """Return all defined overloads for *func* as a sequence.""" - # classmethod and staticmethod - f = getattr(func, "__func__", func) - if f.__module__ not in _overload_registry: - return [] - mod_dict = _overload_registry[f.__module__] - if f.__qualname__ not in mod_dict: - return [] - return list(mod_dict[f.__qualname__].values()) - - def clear_overloads(): - """Clear all overloads in the registry.""" - _overload_registry.clear() - - -# This is not a real generic class. Don't use outside annotations. -Type = typing.Type - -# Various ABCs mimicking those in collections.abc. -# A few are simply re-exported for completeness. -Awaitable = typing.Awaitable -Coroutine = typing.Coroutine -AsyncIterable = typing.AsyncIterable -AsyncIterator = typing.AsyncIterator -Deque = typing.Deque -ContextManager = typing.ContextManager -AsyncContextManager = typing.AsyncContextManager -DefaultDict = typing.DefaultDict -OrderedDict = typing.OrderedDict -Counter = typing.Counter -ChainMap = typing.ChainMap -AsyncGenerator = typing.AsyncGenerator -Text = typing.Text -TYPE_CHECKING = typing.TYPE_CHECKING - - -_PROTO_ALLOWLIST = { - 'collections.abc': [ - 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable', - 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible', 'Buffer', - ], - 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'], - 'typing_extensions': ['Buffer'], -} - - -_EXCLUDED_ATTRS = { - "__abstractmethods__", "__annotations__", "__weakref__", "_is_protocol", - "_is_runtime_protocol", "__dict__", "__slots__", "__parameters__", - "__orig_bases__", "__module__", "_MutableMapping__marker", "__doc__", - "__subclasshook__", "__orig_class__", "__init__", "__new__", - "__protocol_attrs__", "__callable_proto_members_only__", -} - -if sys.version_info >= (3, 9): - _EXCLUDED_ATTRS.add("__class_getitem__") - -if sys.version_info >= (3, 12): - _EXCLUDED_ATTRS.add("__type_params__") - -_EXCLUDED_ATTRS = frozenset(_EXCLUDED_ATTRS) - - -def _get_protocol_attrs(cls): - attrs = set() - for base in cls.__mro__[:-1]: # without object - if base.__name__ in {'Protocol', 'Generic'}: - continue - annotations = getattr(base, '__annotations__', {}) - for attr in (*base.__dict__, *annotations): - if (not attr.startswith('_abc_') and attr not in _EXCLUDED_ATTRS): - attrs.add(attr) - return attrs - - -def _caller(depth=2): - try: - return sys._getframe(depth).f_globals.get('__name__', '__main__') - except (AttributeError, ValueError): # For platforms without _getframe() - return None - - -# The performance of runtime-checkable protocols is significantly improved on Python 
3.12, -# so we backport the 3.12 version of Protocol to Python <=3.11 -if sys.version_info >= (3, 12): - Protocol = typing.Protocol -else: - def _allow_reckless_class_checks(depth=3): - """Allow instance and class checks for special stdlib modules. - The abc and functools modules indiscriminately call isinstance() and - issubclass() on the whole MRO of a user class, which may contain protocols. - """ - return _caller(depth) in {'abc', 'functools', None} - - def _no_init(self, *args, **kwargs): - if type(self)._is_protocol: - raise TypeError('Protocols cannot be instantiated') - - # Inheriting from typing._ProtocolMeta isn't actually desirable, - # but is necessary to allow typing.Protocol and typing_extensions.Protocol - # to mix without getting TypeErrors about "metaclass conflict" - class _ProtocolMeta(type(typing.Protocol)): - # This metaclass is somewhat unfortunate, - # but is necessary for several reasons... - # - # NOTE: DO NOT call super() in any methods in this class - # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 - # and those are slow - def __new__(mcls, name, bases, namespace, **kwargs): - if name == "Protocol" and len(bases) < 2: - pass - elif {Protocol, typing.Protocol} & set(bases): - for base in bases: - if not ( - base in {object, typing.Generic, Protocol, typing.Protocol} - or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) - or is_protocol(base) - ): - raise TypeError( - f"Protocols can only inherit from other protocols, " - f"got {base!r}" - ) - return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) - - def __init__(cls, *args, **kwargs): - abc.ABCMeta.__init__(cls, *args, **kwargs) - if getattr(cls, "_is_protocol", False): - cls.__protocol_attrs__ = _get_protocol_attrs(cls) - # PEP 544 prohibits using issubclass() - # with protocols that have non-method members. - cls.__callable_proto_members_only__ = all( - callable(getattr(cls, attr, None)) for attr in cls.__protocol_attrs__ - ) - - def __subclasscheck__(cls, other): - if cls is Protocol: - return type.__subclasscheck__(cls, other) - if ( - getattr(cls, '_is_protocol', False) - and not _allow_reckless_class_checks() - ): - if not isinstance(other, type): - # Same error message as for issubclass(1, int). - raise TypeError('issubclass() arg 1 must be a class') - if ( - not cls.__callable_proto_members_only__ - and cls.__dict__.get("__subclasshook__") is _proto_hook - ): - raise TypeError( - "Protocols with non-method members don't support issubclass()" - ) - if not getattr(cls, '_is_runtime_protocol', False): - raise TypeError( - "Instance and class checks can only be used with " - "@runtime_checkable protocols" - ) - return abc.ABCMeta.__subclasscheck__(cls, other) - - def __instancecheck__(cls, instance): - # We need this method for situations where attributes are - # assigned in __init__. 
- if cls is Protocol: - return type.__instancecheck__(cls, instance) - if not getattr(cls, "_is_protocol", False): - # i.e., it's a concrete subclass of a protocol - return abc.ABCMeta.__instancecheck__(cls, instance) - - if ( - not getattr(cls, '_is_runtime_protocol', False) and - not _allow_reckless_class_checks() - ): - raise TypeError("Instance and class checks can only be used with" - " @runtime_checkable protocols") - - if abc.ABCMeta.__instancecheck__(cls, instance): - return True - - for attr in cls.__protocol_attrs__: - try: - val = inspect.getattr_static(instance, attr) - except AttributeError: - break - if val is None and callable(getattr(cls, attr, None)): - break - else: - return True - - return False - - def __eq__(cls, other): - # Hack so that typing.Generic.__class_getitem__ - # treats typing_extensions.Protocol - # as equivalent to typing.Protocol - if abc.ABCMeta.__eq__(cls, other) is True: - return True - return cls is Protocol and other is typing.Protocol - - # This has to be defined, or the abc-module cache - # complains about classes with this metaclass being unhashable, - # if we define only __eq__! - def __hash__(cls) -> int: - return type.__hash__(cls) - - @classmethod - def _proto_hook(cls, other): - if not cls.__dict__.get('_is_protocol', False): - return NotImplemented - - for attr in cls.__protocol_attrs__: - for base in other.__mro__: - # Check if the members appears in the class dictionary... - if attr in base.__dict__: - if base.__dict__[attr] is None: - return NotImplemented - break - - # ...or in annotations, if it is a sub-protocol. - annotations = getattr(base, '__annotations__', {}) - if ( - isinstance(annotations, collections.abc.Mapping) - and attr in annotations - and is_protocol(other) - ): - break - else: - return NotImplemented - return True - - class Protocol(typing.Generic, metaclass=_ProtocolMeta): - __doc__ = typing.Protocol.__doc__ - __slots__ = () - _is_protocol = True - _is_runtime_protocol = False - - def __init_subclass__(cls, *args, **kwargs): - super().__init_subclass__(*args, **kwargs) - - # Determine if this is a protocol or a concrete subclass. - if not cls.__dict__.get('_is_protocol', False): - cls._is_protocol = any(b is Protocol for b in cls.__bases__) - - # Set (or override) the protocol subclass hook. - if '__subclasshook__' not in cls.__dict__: - cls.__subclasshook__ = _proto_hook - - # Prohibit instantiation for protocol classes - if cls._is_protocol and cls.__init__ is Protocol.__init__: - cls.__init__ = _no_init - - -# The "runtime" alias exists for backwards compatibility. 
-runtime = runtime_checkable = typing.runtime_checkable - - -# Our version of runtime-checkable protocols is faster on Python 3.8-3.11 -if sys.version_info >= (3, 12): - SupportsInt = typing.SupportsInt - SupportsFloat = typing.SupportsFloat - SupportsComplex = typing.SupportsComplex - SupportsBytes = typing.SupportsBytes - SupportsIndex = typing.SupportsIndex - SupportsAbs = typing.SupportsAbs - SupportsRound = typing.SupportsRound -else: - @runtime_checkable - class SupportsInt(Protocol): - """An ABC with one abstract method __int__.""" - __slots__ = () - - @abc.abstractmethod - def __int__(self) -> int: - pass - - @runtime_checkable - class SupportsFloat(Protocol): - """An ABC with one abstract method __float__.""" - __slots__ = () - - @abc.abstractmethod - def __float__(self) -> float: - pass - - @runtime_checkable - class SupportsComplex(Protocol): - """An ABC with one abstract method __complex__.""" - __slots__ = () - - @abc.abstractmethod - def __complex__(self) -> complex: - pass - - @runtime_checkable - class SupportsBytes(Protocol): - """An ABC with one abstract method __bytes__.""" - __slots__ = () - - @abc.abstractmethod - def __bytes__(self) -> bytes: - pass - - @runtime_checkable - class SupportsIndex(Protocol): - __slots__ = () - - @abc.abstractmethod - def __index__(self) -> int: - pass - - @runtime_checkable - class SupportsAbs(Protocol[T_co]): - """ - An ABC with one abstract method __abs__ that is covariant in its return type. - """ - __slots__ = () - - @abc.abstractmethod - def __abs__(self) -> T_co: - pass - - @runtime_checkable - class SupportsRound(Protocol[T_co]): - """ - An ABC with one abstract method __round__ that is covariant in its return type. - """ - __slots__ = () - - @abc.abstractmethod - def __round__(self, ndigits: int = 0) -> T_co: - pass - - -def _ensure_subclassable(mro_entries): - def inner(func): - if sys.implementation.name == "pypy" and sys.version_info < (3, 9): - cls_dict = { - "__call__": staticmethod(func), - "__mro_entries__": staticmethod(mro_entries) - } - t = type(func.__name__, (), cls_dict) - return functools.update_wrapper(t(), func) - else: - func.__mro_entries__ = mro_entries - return func - return inner - - -if sys.version_info >= (3, 13): - # The standard library TypedDict in Python 3.8 does not store runtime information - # about which (if any) keys are optional. See https://bugs.python.org/issue38834 - # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" - # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 - # The standard library TypedDict below Python 3.11 does not store runtime - # information about optional and required keys when using Required or NotRequired. - # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. - # Aaaand on 3.12 we add __orig_bases__ to TypedDict - # to enable better runtime introspection. - # On 3.13 we deprecate some odd ways of creating TypedDicts. - TypedDict = typing.TypedDict - _TypedDictMeta = typing._TypedDictMeta - is_typeddict = typing.is_typeddict -else: - # 3.10.0 and later - _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters - - class _TypedDictMeta(type): - def __new__(cls, name, bases, ns, total=True): - """Create new typed dict class object. - - This method is called when TypedDict is subclassed, - or when TypedDict is instantiated. This way - TypedDict supports all three syntax forms described in its docstring. - Subclasses and instances of TypedDict return actual dictionaries. 
- """ - for base in bases: - if type(base) is not _TypedDictMeta and base is not typing.Generic: - raise TypeError('cannot inherit from both a TypedDict type ' - 'and a non-TypedDict base class') - - if any(issubclass(b, typing.Generic) for b in bases): - generic_base = (typing.Generic,) - else: - generic_base = () - - # typing.py generally doesn't let you inherit from plain Generic, unless - # the name of the class happens to be "Protocol" - tp_dict = type.__new__(_TypedDictMeta, "Protocol", (*generic_base, dict), ns) - tp_dict.__name__ = name - if tp_dict.__qualname__ == "Protocol": - tp_dict.__qualname__ = name - - if not hasattr(tp_dict, '__orig_bases__'): - tp_dict.__orig_bases__ = bases - - annotations = {} - own_annotations = ns.get('__annotations__', {}) - msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" - if _TAKES_MODULE: - own_annotations = { - n: typing._type_check(tp, msg, module=tp_dict.__module__) - for n, tp in own_annotations.items() - } - else: - own_annotations = { - n: typing._type_check(tp, msg) - for n, tp in own_annotations.items() - } - required_keys = set() - optional_keys = set() - - for base in bases: - annotations.update(base.__dict__.get('__annotations__', {})) - required_keys.update(base.__dict__.get('__required_keys__', ())) - optional_keys.update(base.__dict__.get('__optional_keys__', ())) - - annotations.update(own_annotations) - for annotation_key, annotation_type in own_annotations.items(): - annotation_origin = get_origin(annotation_type) - if annotation_origin is Annotated: - annotation_args = get_args(annotation_type) - if annotation_args: - annotation_type = annotation_args[0] - annotation_origin = get_origin(annotation_type) - - if annotation_origin is Required: - required_keys.add(annotation_key) - elif annotation_origin is NotRequired: - optional_keys.add(annotation_key) - elif total: - required_keys.add(annotation_key) - else: - optional_keys.add(annotation_key) - - tp_dict.__annotations__ = annotations - tp_dict.__required_keys__ = frozenset(required_keys) - tp_dict.__optional_keys__ = frozenset(optional_keys) - if not hasattr(tp_dict, '__total__'): - tp_dict.__total__ = total - return tp_dict - - __call__ = dict # static method - - def __subclasscheck__(cls, other): - # Typed dicts are only for static structural subtyping. - raise TypeError('TypedDict does not support instance and class checks') - - __instancecheck__ = __subclasscheck__ - - _TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {}) - - @_ensure_subclassable(lambda bases: (_TypedDict,)) - def TypedDict(typename, fields=_marker, /, *, total=True, **kwargs): - """A simple typed namespace. At runtime it is equivalent to a plain dict. - - TypedDict creates a dictionary type such that a type checker will expect all - instances to have a certain set of keys, where each key is - associated with a value of a consistent type. This expectation - is not checked at runtime. - - Usage:: - - class Point2D(TypedDict): - x: int - y: int - label: str - - a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK - b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check - - assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') - - The type info can be accessed via the Point2D.__annotations__ dict, and - the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. - TypedDict supports an additional equivalent form:: - - Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) - - By default, all keys must be present in a TypedDict. 
It is possible - to override this by specifying totality:: - - class Point2D(TypedDict, total=False): - x: int - y: int - - This means that a Point2D TypedDict can have any of the keys omitted. A type - checker is only expected to support a literal False or True as the value of - the total argument. True is the default, and makes all items defined in the - class body be required. - - The Required and NotRequired special forms can also be used to mark - individual keys as being required or not required:: - - class Point2D(TypedDict): - x: int # the "x" key must always be present (Required is the default) - y: NotRequired[int] # the "y" key can be omitted - - See PEP 655 for more details on Required and NotRequired. - """ - if fields is _marker or fields is None: - if fields is _marker: - deprecated_thing = "Failing to pass a value for the 'fields' parameter" - else: - deprecated_thing = "Passing `None` as the 'fields' parameter" - - example = f"`{typename} = TypedDict({typename!r}, {{}})`" - deprecation_msg = ( - f"{deprecated_thing} is deprecated and will be disallowed in " - "Python 3.15. To create a TypedDict class with 0 fields " - "using the functional syntax, pass an empty dictionary, e.g. " - ) + example + "." - warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) - fields = kwargs - elif kwargs: - raise TypeError("TypedDict takes either a dict or keyword arguments," - " but not both") - if kwargs: - warnings.warn( - "The kwargs-based syntax for TypedDict definitions is deprecated " - "in Python 3.11, will be removed in Python 3.13, and may not be " - "understood by third-party type checkers.", - DeprecationWarning, - stacklevel=2, - ) - - ns = {'__annotations__': dict(fields)} - module = _caller() - if module is not None: - # Setting correct module is necessary to make typed dict classes pickleable. - ns['__module__'] = module - - td = _TypedDictMeta(typename, (), ns, total=total) - td.__orig_bases__ = (TypedDict,) - return td - - if hasattr(typing, "_TypedDictMeta"): - _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) - else: - _TYPEDDICT_TYPES = (_TypedDictMeta,) - - def is_typeddict(tp): - """Check if an annotation is a TypedDict class - - For example:: - class Film(TypedDict): - title: str - year: int - - is_typeddict(Film) # => True - is_typeddict(Union[list, str]) # => False - """ - # On 3.8, this would otherwise return True - if hasattr(typing, "TypedDict") and tp is typing.TypedDict: - return False - return isinstance(tp, _TYPEDDICT_TYPES) - - -if hasattr(typing, "assert_type"): - assert_type = typing.assert_type - -else: - def assert_type(val, typ, /): - """Assert (to the type checker) that the value is of the given type. - - When the type checker encounters a call to assert_type(), it - emits an error if the value is not of the specified type:: - - def greet(name: str) -> None: - assert_type(name, str) # ok - assert_type(name, int) # type checker error - - At runtime this returns the first argument unchanged and otherwise - does nothing. 
- """ - return val - - -if hasattr(typing, "Required"): # 3.11+ - get_type_hints = typing.get_type_hints -else: # <=3.10 - # replaces _strip_annotations() - def _strip_extras(t): - """Strips Annotated, Required and NotRequired from a given type.""" - if isinstance(t, _AnnotatedAlias): - return _strip_extras(t.__origin__) - if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired): - return _strip_extras(t.__args__[0]) - if isinstance(t, typing._GenericAlias): - stripped_args = tuple(_strip_extras(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - return t.copy_with(stripped_args) - if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): - stripped_args = tuple(_strip_extras(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - return _types.GenericAlias(t.__origin__, stripped_args) - if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): - stripped_args = tuple(_strip_extras(a) for a in t.__args__) - if stripped_args == t.__args__: - return t - return functools.reduce(operator.or_, stripped_args) - - return t - - def get_type_hints(obj, globalns=None, localns=None, include_extras=False): - """Return type hints for an object. - - This is often the same as obj.__annotations__, but it handles - forward references encoded as string literals, adds Optional[t] if a - default value equal to None is set and recursively replaces all - 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' - (unless 'include_extras=True'). - - The argument may be a module, class, method, or function. The annotations - are returned as a dictionary. For classes, annotations include also - inherited members. - - TypeError is raised if the argument is not of a type that can contain - annotations, and an empty dictionary is returned if no annotations are - present. - - BEWARE -- the behavior of globalns and localns is counterintuitive - (unless you are familiar with how eval() and exec() work). The - search order is locals first, then globals. - - - If no dict arguments are passed, an attempt is made to use the - globals from obj (or the respective module's globals for classes), - and these are also used as the locals. If the object does not appear - to have globals, an empty dictionary is used. - - - If one dict argument is passed, it is used for both globals and - locals. - - - If two dict arguments are passed, they specify globals and - locals, respectively. - """ - if hasattr(typing, "Annotated"): # 3.9+ - hint = typing.get_type_hints( - obj, globalns=globalns, localns=localns, include_extras=True - ) - else: # 3.8 - hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) - if include_extras: - return hint - return {k: _strip_extras(t) for k, t in hint.items()} - - -# Python 3.9+ has PEP 593 (Annotated) -if hasattr(typing, 'Annotated'): - Annotated = typing.Annotated - # Not exported and not a public API, but needed for get_origin() and get_args() - # to work. - _AnnotatedAlias = typing._AnnotatedAlias -# 3.8 -else: - class _AnnotatedAlias(typing._GenericAlias, _root=True): - """Runtime representation of an annotated type. - - At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' - with extra annotations. The alias behaves like a normal typing alias, - instantiating is the same as instantiating the underlying type, binding - it to types is also the same. 
- """ - def __init__(self, origin, metadata): - if isinstance(origin, _AnnotatedAlias): - metadata = origin.__metadata__ + metadata - origin = origin.__origin__ - super().__init__(origin, origin) - self.__metadata__ = metadata - - def copy_with(self, params): - assert len(params) == 1 - new_type = params[0] - return _AnnotatedAlias(new_type, self.__metadata__) - - def __repr__(self): - return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " - f"{', '.join(repr(a) for a in self.__metadata__)}]") - - def __reduce__(self): - return operator.getitem, ( - Annotated, (self.__origin__,) + self.__metadata__ - ) - - def __eq__(self, other): - if not isinstance(other, _AnnotatedAlias): - return NotImplemented - if self.__origin__ != other.__origin__: - return False - return self.__metadata__ == other.__metadata__ - - def __hash__(self): - return hash((self.__origin__, self.__metadata__)) - - class Annotated: - """Add context specific metadata to a type. - - Example: Annotated[int, runtime_check.Unsigned] indicates to the - hypothetical runtime_check module that this type is an unsigned int. - Every other consumer of this type can ignore this metadata and treat - this type as int. - - The first argument to Annotated must be a valid type (and will be in - the __origin__ field), the remaining arguments are kept as a tuple in - the __extra__ field. - - Details: - - - It's an error to call `Annotated` with less than two arguments. - - Nested Annotated are flattened:: - - Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] - - - Instantiating an annotated type is equivalent to instantiating the - underlying type:: - - Annotated[C, Ann1](5) == C(5) - - - Annotated can be used as a generic type alias:: - - Optimized = Annotated[T, runtime.Optimize()] - Optimized[int] == Annotated[int, runtime.Optimize()] - - OptimizedList = Annotated[List[T], runtime.Optimize()] - OptimizedList[int] == Annotated[List[int], runtime.Optimize()] - """ - - __slots__ = () - - def __new__(cls, *args, **kwargs): - raise TypeError("Type Annotated cannot be instantiated.") - - @typing._tp_cache - def __class_getitem__(cls, params): - if not isinstance(params, tuple) or len(params) < 2: - raise TypeError("Annotated[...] should be used " - "with at least two arguments (a type and an " - "annotation).") - allowed_special_forms = (ClassVar, Final) - if get_origin(params[0]) in allowed_special_forms: - origin = params[0] - else: - msg = "Annotated[t, ...]: t must be a type." - origin = typing._type_check(params[0], msg) - metadata = tuple(params[1:]) - return _AnnotatedAlias(origin, metadata) - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - f"Cannot subclass {cls.__module__}.Annotated" - ) - -# Python 3.8 has get_origin() and get_args() but those implementations aren't -# Annotated-aware, so we can't use those. Python 3.9's versions don't support -# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. -if sys.version_info[:2] >= (3, 10): - get_origin = typing.get_origin - get_args = typing.get_args -# 3.8-3.9 -else: - try: - # 3.9+ - from typing import _BaseGenericAlias - except ImportError: - _BaseGenericAlias = typing._GenericAlias - try: - # 3.9+ - from typing import GenericAlias as _typing_GenericAlias - except ImportError: - _typing_GenericAlias = typing._GenericAlias - - def get_origin(tp): - """Get the unsubscripted version of a type. - - This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar - and Annotated. 
Return None for unsupported types. Examples:: - - get_origin(Literal[42]) is Literal - get_origin(int) is None - get_origin(ClassVar[int]) is ClassVar - get_origin(Generic) is Generic - get_origin(Generic[T]) is Generic - get_origin(Union[T, int]) is Union - get_origin(List[Tuple[T, T]][int]) == list - get_origin(P.args) is P - """ - if isinstance(tp, _AnnotatedAlias): - return Annotated - if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias, - ParamSpecArgs, ParamSpecKwargs)): - return tp.__origin__ - if tp is typing.Generic: - return typing.Generic - return None - - def get_args(tp): - """Get type arguments with all substitutions performed. - - For unions, basic simplifications used by Union constructor are performed. - Examples:: - get_args(Dict[str, int]) == (str, int) - get_args(int) == () - get_args(Union[int, Union[T, int], str][int]) == (int, str) - get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) - get_args(Callable[[], T][int]) == ([], int) - """ - if isinstance(tp, _AnnotatedAlias): - return (tp.__origin__,) + tp.__metadata__ - if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): - if getattr(tp, "_special", False): - return () - res = tp.__args__ - if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: - res = (list(res[:-1]), res[-1]) - return res - return () - - -# 3.10+ -if hasattr(typing, 'TypeAlias'): - TypeAlias = typing.TypeAlias -# 3.9 -elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm - def TypeAlias(self, parameters): - """Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example above. - """ - raise TypeError(f"{self} is not subscriptable") -# 3.8 -else: - TypeAlias = _ExtensionsSpecialForm( - 'TypeAlias', - doc="""Special marker indicating that an assignment should - be recognized as a proper type alias definition by type - checkers. - - For example:: - - Predicate: TypeAlias = Callable[..., bool] - - It's invalid when used anywhere except as in the example - above.""" - ) - - -def _set_default(type_param, default): - if isinstance(default, (tuple, list)): - type_param.__default__ = tuple((typing._type_check(d, "Default must be a type") - for d in default)) - elif default != _marker: - if isinstance(type_param, ParamSpec) and default is ...: # ... 
not valid <3.11 - type_param.__default__ = default - else: - type_param.__default__ = typing._type_check(default, "Default must be a type") - else: - type_param.__default__ = None - - -def _set_module(typevarlike): - # for pickling: - def_mod = _caller(depth=3) - if def_mod != 'typing_extensions': - typevarlike.__module__ = def_mod - - -class _DefaultMixin: - """Mixin for TypeVarLike defaults.""" - - __slots__ = () - __init__ = _set_default - - -# Classes using this metaclass must provide a _backported_typevarlike ClassVar -class _TypeVarLikeMeta(type): - def __instancecheck__(cls, __instance: Any) -> bool: - return isinstance(__instance, cls._backported_typevarlike) - - -# Add default and infer_variance parameters from PEP 696 and 695 -class TypeVar(metaclass=_TypeVarLikeMeta): - """Type variable.""" - - _backported_typevarlike = typing.TypeVar - - def __new__(cls, name, *constraints, bound=None, - covariant=False, contravariant=False, - default=_marker, infer_variance=False): - if hasattr(typing, "TypeAliasType"): - # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar - typevar = typing.TypeVar(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant, - infer_variance=infer_variance) - else: - typevar = typing.TypeVar(name, *constraints, bound=bound, - covariant=covariant, contravariant=contravariant) - if infer_variance and (covariant or contravariant): - raise ValueError("Variance cannot be specified with infer_variance.") - typevar.__infer_variance__ = infer_variance - _set_default(typevar, default) - _set_module(typevar) - return typevar - - def __init_subclass__(cls) -> None: - raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") - - -# Python 3.10+ has PEP 612 -if hasattr(typing, 'ParamSpecArgs'): - ParamSpecArgs = typing.ParamSpecArgs - ParamSpecKwargs = typing.ParamSpecKwargs -# 3.8-3.9 -else: - class _Immutable: - """Mixin to indicate that object should not be copied.""" - __slots__ = () - - def __copy__(self): - return self - - def __deepcopy__(self, memo): - return self - - class ParamSpecArgs(_Immutable): - """The args for a ParamSpec object. - - Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. - - ParamSpecArgs objects have a reference back to their ParamSpec: - - P.args.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. - """ - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return f"{self.__origin__.__name__}.args" - - def __eq__(self, other): - if not isinstance(other, ParamSpecArgs): - return NotImplemented - return self.__origin__ == other.__origin__ - - class ParamSpecKwargs(_Immutable): - """The kwargs for a ParamSpec object. - - Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs. - - ParamSpecKwargs objects have a reference back to their ParamSpec: - - P.kwargs.__origin__ is P - - This type is meant for runtime introspection and has no special meaning to - static type checkers. 
- """ - def __init__(self, origin): - self.__origin__ = origin - - def __repr__(self): - return f"{self.__origin__.__name__}.kwargs" - - def __eq__(self, other): - if not isinstance(other, ParamSpecKwargs): - return NotImplemented - return self.__origin__ == other.__origin__ - -# 3.10+ -if hasattr(typing, 'ParamSpec'): - - # Add default parameter - PEP 696 - class ParamSpec(metaclass=_TypeVarLikeMeta): - """Parameter specification.""" - - _backported_typevarlike = typing.ParamSpec - - def __new__(cls, name, *, bound=None, - covariant=False, contravariant=False, - infer_variance=False, default=_marker): - if hasattr(typing, "TypeAliasType"): - # PEP 695 implemented, can pass infer_variance to typing.TypeVar - paramspec = typing.ParamSpec(name, bound=bound, - covariant=covariant, - contravariant=contravariant, - infer_variance=infer_variance) - else: - paramspec = typing.ParamSpec(name, bound=bound, - covariant=covariant, - contravariant=contravariant) - paramspec.__infer_variance__ = infer_variance - - _set_default(paramspec, default) - _set_module(paramspec) - return paramspec - - def __init_subclass__(cls) -> None: - raise TypeError(f"type '{__name__}.ParamSpec' is not an acceptable base type") - -# 3.8-3.9 -else: - - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class ParamSpec(list, _DefaultMixin): - """Parameter specification variable. - - Usage:: - - P = ParamSpec('P') - - Parameter specification variables exist primarily for the benefit of static - type checkers. They are used to forward the parameter types of one - callable to another callable, a pattern commonly found in higher order - functions and decorators. They are only valid when used in ``Concatenate``, - or s the first argument to ``Callable``. In Python 3.10 and higher, - they are also supported in user-defined Generics at runtime. - See class Generic for more information on generic types. An - example for annotating a decorator:: - - T = TypeVar('T') - P = ParamSpec('P') - - def add_logging(f: Callable[P, T]) -> Callable[P, T]: - '''A type-safe decorator to add logging to a function.''' - def inner(*args: P.args, **kwargs: P.kwargs) -> T: - logging.info(f'{f.__name__} was called') - return f(*args, **kwargs) - return inner - - @add_logging - def add_two(x: float, y: float) -> float: - '''Add two numbers together.''' - return x + y - - Parameter specification variables defined with covariant=True or - contravariant=True can be used to declare covariant or contravariant - generic types. These keyword arguments are valid, but their actual semantics - are yet to be decided. See PEP 612 for details. - - Parameter specification variables can be introspected. e.g.: - - P.__name__ == 'T' - P.__bound__ == None - P.__covariant__ == False - P.__contravariant__ == False - - Note that only parameter specification variables defined in global scope can - be pickled. - """ - - # Trick Generic __parameters__. 
- __class__ = typing.TypeVar - - @property - def args(self): - return ParamSpecArgs(self) - - @property - def kwargs(self): - return ParamSpecKwargs(self) - - def __init__(self, name, *, bound=None, covariant=False, contravariant=False, - infer_variance=False, default=_marker): - super().__init__([self]) - self.__name__ = name - self.__covariant__ = bool(covariant) - self.__contravariant__ = bool(contravariant) - self.__infer_variance__ = bool(infer_variance) - if bound: - self.__bound__ = typing._type_check(bound, 'Bound must be a type.') - else: - self.__bound__ = None - _DefaultMixin.__init__(self, default) - - # for pickling: - def_mod = _caller() - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - def __repr__(self): - if self.__infer_variance__: - prefix = '' - elif self.__covariant__: - prefix = '+' - elif self.__contravariant__: - prefix = '-' - else: - prefix = '~' - return prefix + self.__name__ - - def __hash__(self): - return object.__hash__(self) - - def __eq__(self, other): - return self is other - - def __reduce__(self): - return self.__name__ - - # Hack to get typing._type_check to pass. - def __call__(self, *args, **kwargs): - pass - - -# 3.8-3.9 -if not hasattr(typing, 'Concatenate'): - # Inherits from list as a workaround for Callable checks in Python < 3.9.2. - class _ConcatenateGenericAlias(list): - - # Trick Generic into looking into this for __parameters__. - __class__ = typing._GenericAlias - - # Flag in 3.8. - _special = False - - def __init__(self, origin, args): - super().__init__(args) - self.__origin__ = origin - self.__args__ = args - - def __repr__(self): - _type_repr = typing._type_repr - return (f'{_type_repr(self.__origin__)}' - f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]') - - def __hash__(self): - return hash((self.__origin__, self.__args__)) - - # Hack to get typing._type_check to pass in Generic. - def __call__(self, *args, **kwargs): - pass - - @property - def __parameters__(self): - return tuple( - tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec)) - ) - - -# 3.8-3.9 -@typing._tp_cache -def _concatenate_getitem(self, parameters): - if parameters == (): - raise TypeError("Cannot take a Concatenate of no types.") - if not isinstance(parameters, tuple): - parameters = (parameters,) - if not isinstance(parameters[-1], ParamSpec): - raise TypeError("The last parameter to Concatenate should be a " - "ParamSpec variable.") - msg = "Concatenate[arg, ...]: each arg must be a type." - parameters = tuple(typing._type_check(p, msg) for p in parameters) - return _ConcatenateGenericAlias(self, parameters) - - -# 3.10+ -if hasattr(typing, 'Concatenate'): - Concatenate = typing.Concatenate - _ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa: F811 -# 3.9 -elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm - def Concatenate(self, parameters): - """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. 
- """ - return _concatenate_getitem(self, parameters) -# 3.8 -else: - class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): - def __getitem__(self, parameters): - return _concatenate_getitem(self, parameters) - - Concatenate = _ConcatenateForm( - 'Concatenate', - doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a - higher order function which adds, removes or transforms parameters of a - callable. - - For example:: - - Callable[Concatenate[int, P], int] - - See PEP 612 for detailed information. - """) - -# 3.10+ -if hasattr(typing, 'TypeGuard'): - TypeGuard = typing.TypeGuard -# 3.9 -elif sys.version_info[:2] >= (3, 9): - @_ExtensionsSpecialForm - def TypeGuard(self, parameters): - """Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". - - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """ - item = typing._type_check(parameters, f'{self} accepts only a single type.') - return typing._GenericAlias(self, (item,)) -# 3.8 -else: - class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type') - return typing._GenericAlias(self, (item,)) - - TypeGuard = _TypeGuardForm( - 'TypeGuard', - doc="""Special typing form used to annotate the return type of a user-defined - type guard function. ``TypeGuard`` only accepts a single type argument. - At runtime, functions marked this way should return a boolean. - - ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static - type checkers to determine a more precise type of an expression within a - program's code flow. Usually type narrowing is done by analyzing - conditional code flow and applying the narrowing to a block of code. The - conditional expression here is sometimes referred to as a "type guard". 
- - Sometimes it would be convenient to use a user-defined boolean function - as a type guard. Such a function should use ``TypeGuard[...]`` as its - return type to alert static type checkers to this intention. - - Using ``-> TypeGuard`` tells the static type checker that for a given - function: - - 1. The return value is a boolean. - 2. If the return value is ``True``, the type of its argument - is the type inside ``TypeGuard``. - - For example:: - - def is_str(val: Union[str, float]): - # "isinstance" type guard - if isinstance(val, str): - # Type of ``val`` is narrowed to ``str`` - ... - else: - # Else, type of ``val`` is narrowed to ``float``. - ... - - Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower - form of ``TypeA`` (it can even be a wider form) and this may lead to - type-unsafe results. The main reason is to allow for things like - narrowing ``List[object]`` to ``List[str]`` even though the latter is not - a subtype of the former, since ``List`` is invariant. The responsibility of - writing type-safe type guards is left to the user. - - ``TypeGuard`` also works with type variables. For more information, see - PEP 647 (User-Defined Type Guards). - """) - - -# Vendored from cpython typing._SpecialFrom -class _SpecialForm(typing._Final, _root=True): - __slots__ = ('_name', '__doc__', '_getitem') - - def __init__(self, getitem): - self._getitem = getitem - self._name = getitem.__name__ - self.__doc__ = getitem.__doc__ - - def __getattr__(self, item): - if item in {'__name__', '__qualname__'}: - return self._name - - raise AttributeError(item) - - def __mro_entries__(self, bases): - raise TypeError(f"Cannot subclass {self!r}") - - def __repr__(self): - return f'typing_extensions.{self._name}' - - def __reduce__(self): - return self._name - - def __call__(self, *args, **kwds): - raise TypeError(f"Cannot instantiate {self!r}") - - def __or__(self, other): - return typing.Union[self, other] - - def __ror__(self, other): - return typing.Union[other, self] - - def __instancecheck__(self, obj): - raise TypeError(f"{self} cannot be used with isinstance()") - - def __subclasscheck__(self, cls): - raise TypeError(f"{self} cannot be used with issubclass()") - - @typing._tp_cache - def __getitem__(self, parameters): - return self._getitem(self, parameters) - - -if hasattr(typing, "LiteralString"): # 3.11+ - LiteralString = typing.LiteralString -else: - @_SpecialForm - def LiteralString(self, params): - """Represents an arbitrary literal string. - - Example:: - - from typing_extensions import LiteralString - - def query(sql: LiteralString) -> ...: - ... - - query("SELECT * FROM table") # ok - query(f"SELECT * FROM {input()}") # not ok - - See PEP 675 for details. - - """ - raise TypeError(f"{self} is not subscriptable") - - -if hasattr(typing, "Self"): # 3.11+ - Self = typing.Self -else: - @_SpecialForm - def Self(self, params): - """Used to spell the type of "self" in classes. - - Example:: - - from typing import Self - - class ReturnsSelf: - def parse(self, data: bytes) -> Self: - ... - return self - - """ - - raise TypeError(f"{self} is not subscriptable") - - -if hasattr(typing, "Never"): # 3.11+ - Never = typing.Never -else: - @_SpecialForm - def Never(self, params): - """The bottom type, a type that has no members. 
- - This can be used to define a function that should never be - called, or a function that never returns:: - - from typing_extensions import Never - - def never_call_me(arg: Never) -> None: - pass - - def int_or_str(arg: int | str) -> None: - never_call_me(arg) # type checker error - match arg: - case int(): - print("It's an int") - case str(): - print("It's a str") - case _: - never_call_me(arg) # ok, arg is of type Never - - """ - - raise TypeError(f"{self} is not subscriptable") - - -if hasattr(typing, 'Required'): # 3.11+ - Required = typing.Required - NotRequired = typing.NotRequired -elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 - @_ExtensionsSpecialForm - def Required(self, parameters): - """A special typing construct to mark a key of a total=False TypedDict - as required. For example: - - class Movie(TypedDict, total=False): - title: Required[str] - year: int - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - - There is no runtime checking that a required key is actually provided - when instantiating a related TypedDict. - """ - item = typing._type_check(parameters, f'{self._name} accepts only a single type.') - return typing._GenericAlias(self, (item,)) - - @_ExtensionsSpecialForm - def NotRequired(self, parameters): - """A special typing construct to mark a key of a TypedDict as - potentially missing. For example: - - class Movie(TypedDict): - title: str - year: NotRequired[int] - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - """ - item = typing._type_check(parameters, f'{self._name} accepts only a single type.') - return typing._GenericAlias(self, (item,)) - -else: # 3.8 - class _RequiredForm(_ExtensionsSpecialForm, _root=True): - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type.') - return typing._GenericAlias(self, (item,)) - - Required = _RequiredForm( - 'Required', - doc="""A special typing construct to mark a key of a total=False TypedDict - as required. For example: - - class Movie(TypedDict, total=False): - title: Required[str] - year: int - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - - There is no runtime checking that a required key is actually provided - when instantiating a related TypedDict. - """) - NotRequired = _RequiredForm( - 'NotRequired', - doc="""A special typing construct to mark a key of a TypedDict as - potentially missing. For example: - - class Movie(TypedDict): - title: str - year: NotRequired[int] - - m = Movie( - title='The Matrix', # typechecker error if key is omitted - year=1999, - ) - """) - - -_UNPACK_DOC = """\ -Type unpack operator. - -The type unpack operator takes the child types from some container type, -such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For -example: - - # For some generic class `Foo`: - Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] - - Ts = TypeVarTuple('Ts') - # Specifies that `Bar` is generic in an arbitrary number of types. - # (Think of `Ts` as a tuple of an arbitrary number of individual - # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the - # `Generic[]`.) - class Bar(Generic[Unpack[Ts]]): ... - Bar[int] # Valid - Bar[int, str] # Also valid - -From Python 3.11, this can also be done using the `*` operator: - - Foo[*tuple[int, str]] - class Bar(Generic[*Ts]): ... 
- -The operator can also be used along with a `TypedDict` to annotate -`**kwargs` in a function signature. For instance: - - class Movie(TypedDict): - name: str - year: int - - # This function expects two keyword arguments - *name* of type `str` and - # *year* of type `int`. - def foo(**kwargs: Unpack[Movie]): ... - -Note that there is only some runtime checking of this operator. Not -everything the runtime allows may be accepted by static type checkers. - -For more information, see PEP 646 and PEP 692. -""" - - -if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] - Unpack = typing.Unpack - - def _is_unpack(obj): - return get_origin(obj) is Unpack - -elif sys.version_info[:2] >= (3, 9): # 3.9+ - class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): - def __init__(self, getitem): - super().__init__(getitem) - self.__doc__ = _UNPACK_DOC - - class _UnpackAlias(typing._GenericAlias, _root=True): - __class__ = typing.TypeVar - - @_UnpackSpecialForm - def Unpack(self, parameters): - item = typing._type_check(parameters, f'{self._name} accepts only a single type.') - return _UnpackAlias(self, (item,)) - - def _is_unpack(obj): - return isinstance(obj, _UnpackAlias) - -else: # 3.8 - class _UnpackAlias(typing._GenericAlias, _root=True): - __class__ = typing.TypeVar - - class _UnpackForm(_ExtensionsSpecialForm, _root=True): - def __getitem__(self, parameters): - item = typing._type_check(parameters, - f'{self._name} accepts only a single type.') - return _UnpackAlias(self, (item,)) - - Unpack = _UnpackForm('Unpack', doc=_UNPACK_DOC) - - def _is_unpack(obj): - return isinstance(obj, _UnpackAlias) - - -if hasattr(typing, "TypeVarTuple"): # 3.11+ - - # Add default parameter - PEP 696 - class TypeVarTuple(metaclass=_TypeVarLikeMeta): - """Type variable tuple.""" - - _backported_typevarlike = typing.TypeVarTuple - - def __new__(cls, name, *, default=_marker): - tvt = typing.TypeVarTuple(name) - _set_default(tvt, default) - _set_module(tvt) - return tvt - - def __init_subclass__(self, *args, **kwds): - raise TypeError("Cannot subclass special typing classes") - -else: # <=3.10 - class TypeVarTuple(_DefaultMixin): - """Type variable tuple. - - Usage:: - - Ts = TypeVarTuple('Ts') - - In the same way that a normal type variable is a stand-in for a single - type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* - type such as ``Tuple[int, str]``. - - Type variable tuples can be used in ``Generic`` declarations. - Consider the following example:: - - class Array(Generic[*Ts]): ... - - The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, - where ``T1`` and ``T2`` are type variables. To use these type variables - as type parameters of ``Array``, we must *unpack* the type variable tuple using - the star operator: ``*Ts``. The signature of ``Array`` then behaves - as if we had simply written ``class Array(Generic[T1, T2]): ...``. - In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows - us to parameterise the class with an *arbitrary* number of type parameters. - - Type variable tuples can be used anywhere a normal ``TypeVar`` can. 
- This includes class definitions, as shown above, as well as function - signatures and variable annotations:: - - class Array(Generic[*Ts]): - - def __init__(self, shape: Tuple[*Ts]): - self._shape: Tuple[*Ts] = shape - - def get_shape(self) -> Tuple[*Ts]: - return self._shape - - shape = (Height(480), Width(640)) - x: Array[Height, Width] = Array(shape) - y = abs(x) # Inferred type is Array[Height, Width] - z = x + x # ... is Array[Height, Width] - x.get_shape() # ... is tuple[Height, Width] - - """ - - # Trick Generic __parameters__. - __class__ = typing.TypeVar - - def __iter__(self): - yield self.__unpacked__ - - def __init__(self, name, *, default=_marker): - self.__name__ = name - _DefaultMixin.__init__(self, default) - - # for pickling: - def_mod = _caller() - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - self.__unpacked__ = Unpack[self] - - def __repr__(self): - return self.__name__ - - def __hash__(self): - return object.__hash__(self) - - def __eq__(self, other): - return self is other - - def __reduce__(self): - return self.__name__ - - def __init_subclass__(self, *args, **kwds): - if '_root' not in kwds: - raise TypeError("Cannot subclass special typing classes") - - -if hasattr(typing, "reveal_type"): # 3.11+ - reveal_type = typing.reveal_type -else: # <=3.10 - def reveal_type(obj: T, /) -> T: - """Reveal the inferred type of a variable. - - When a static type checker encounters a call to ``reveal_type()``, - it will emit the inferred type of the argument:: - - x: int = 1 - reveal_type(x) - - Running a static type checker (e.g., ``mypy``) on this example - will produce output similar to 'Revealed type is "builtins.int"'. - - At runtime, the function prints the runtime type of the - argument and returns it unchanged. - - """ - print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) - return obj - - -if hasattr(typing, "assert_never"): # 3.11+ - assert_never = typing.assert_never -else: # <=3.10 - def assert_never(arg: Never, /) -> Never: - """Assert to the type checker that a line of code is unreachable. - - Example:: - - def int_or_str(arg: int | str) -> None: - match arg: - case int(): - print("It's an int") - case str(): - print("It's a str") - case _: - assert_never(arg) - - If a type checker finds that a call to assert_never() is - reachable, it will emit an error. - - At runtime, this throws an exception when called. - - """ - raise AssertionError("Expected code to be unreachable") - - -if sys.version_info >= (3, 12): # 3.12+ - # dataclass_transform exists in 3.11 but lacks the frozen_default parameter - dataclass_transform = typing.dataclass_transform -else: # <=3.11 - def dataclass_transform( - *, - eq_default: bool = True, - order_default: bool = False, - kw_only_default: bool = False, - frozen_default: bool = False, - field_specifiers: typing.Tuple[ - typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], - ... - ] = (), - **kwargs: typing.Any, - ) -> typing.Callable[[T], T]: - """Decorator that marks a function, class, or metaclass as providing - dataclass-like behavior. - - Example: - - from typing_extensions import dataclass_transform - - _T = TypeVar("_T") - - # Used on a decorator function - @dataclass_transform() - def create_model(cls: type[_T]) -> type[_T]: - ... - return cls - - @create_model - class CustomerModel: - id: int - name: str - - # Used on a base class - @dataclass_transform() - class ModelBase: ... 
- - class CustomerModel(ModelBase): - id: int - name: str - - # Used on a metaclass - @dataclass_transform() - class ModelMeta(type): ... - - class ModelBase(metaclass=ModelMeta): ... - - class CustomerModel(ModelBase): - id: int - name: str - - Each of the ``CustomerModel`` classes defined in this example will now - behave similarly to a dataclass created with the ``@dataclasses.dataclass`` - decorator. For example, the type checker will synthesize an ``__init__`` - method. - - The arguments to this decorator can be used to customize this behavior: - - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be - True or False if it is omitted by the caller. - - ``order_default`` indicates whether the ``order`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``kw_only_default`` indicates whether the ``kw_only`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``frozen_default`` indicates whether the ``frozen`` parameter is - assumed to be True or False if it is omitted by the caller. - - ``field_specifiers`` specifies a static list of supported classes - or functions that describe fields, similar to ``dataclasses.field()``. - - At runtime, this decorator records its arguments in the - ``__dataclass_transform__`` attribute on the decorated object. - - See PEP 681 for details. - - """ - def decorator(cls_or_fn): - cls_or_fn.__dataclass_transform__ = { - "eq_default": eq_default, - "order_default": order_default, - "kw_only_default": kw_only_default, - "frozen_default": frozen_default, - "field_specifiers": field_specifiers, - "kwargs": kwargs, - } - return cls_or_fn - return decorator - - -if hasattr(typing, "override"): # 3.12+ - override = typing.override -else: # <=3.11 - _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) - - def override(arg: _F, /) -> _F: - """Indicate that a method is intended to override a method in a base class. - - Usage: - - class Base: - def method(self) -> None: ... - pass - - class Child(Base): - @override - def method(self) -> None: - super().method() - - When this decorator is applied to a method, the type checker will - validate that it overrides a method with the same name on a base class. - This helps prevent bugs that may occur when a base class is changed - without an equivalent change to a child class. - - There is no runtime checking of these properties. The decorator - sets the ``__override__`` attribute to ``True`` on the decorated object - to allow runtime introspection. - - See PEP 698 for details. - - """ - try: - arg.__override__ = True - except (AttributeError, TypeError): - # Skip the attribute silently if it is not writable. - # AttributeError happens if the object has __slots__ or a - # read-only property, TypeError if it's a builtin class. - pass - return arg - - -if hasattr(typing, "deprecated"): - deprecated = typing.deprecated -else: - _T = typing.TypeVar("_T") - - def deprecated( - msg: str, - /, - *, - category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, - stacklevel: int = 1, - ) -> typing.Callable[[_T], _T]: - """Indicate that a class, function or overload is deprecated. - - Usage: - - @deprecated("Use B instead") - class A: - pass - - @deprecated("Use g instead") - def f(): - pass - - @overload - @deprecated("int support is deprecated") - def g(x: int) -> int: ... - @overload - def g(x: str) -> int: ... - - When this decorator is applied to an object, the type checker - will generate a diagnostic on usage of the deprecated object. 
- - The warning specified by ``category`` will be emitted on use - of deprecated objects. For functions, that happens on calls; - for classes, on instantiation. If the ``category`` is ``None``, - no warning is emitted. The ``stacklevel`` determines where the - warning is emitted. If it is ``1`` (the default), the warning - is emitted at the direct caller of the deprecated object; if it - is higher, it is emitted further up the stack. - - The decorator sets the ``__deprecated__`` - attribute on the decorated object to the deprecation message - passed to the decorator. If applied to an overload, the decorator - must be after the ``@overload`` decorator for the attribute to - exist on the overload as returned by ``get_overloads()``. - - See PEP 702 for details. - - """ - def decorator(arg: _T, /) -> _T: - if category is None: - arg.__deprecated__ = msg - return arg - elif isinstance(arg, type): - original_new = arg.__new__ - has_init = arg.__init__ is not object.__init__ - - @functools.wraps(original_new) - def __new__(cls, *args, **kwargs): - warnings.warn(msg, category=category, stacklevel=stacklevel + 1) - if original_new is not object.__new__: - return original_new(cls, *args, **kwargs) - # Mirrors a similar check in object.__new__. - elif not has_init and (args or kwargs): - raise TypeError(f"{cls.__name__}() takes no arguments") - else: - return original_new(cls) - - arg.__new__ = staticmethod(__new__) - arg.__deprecated__ = __new__.__deprecated__ = msg - return arg - elif callable(arg): - @functools.wraps(arg) - def wrapper(*args, **kwargs): - warnings.warn(msg, category=category, stacklevel=stacklevel + 1) - return arg(*args, **kwargs) - - arg.__deprecated__ = wrapper.__deprecated__ = msg - return wrapper - else: - raise TypeError( - "@deprecated decorator with non-None category must be applied to " - f"a class or callable, not {arg!r}" - ) - - return decorator - - -# We have to do some monkey patching to deal with the dual nature of -# Unpack/TypeVarTuple: -# - We want Unpack to be a kind of TypeVar so it gets accepted in -# Generic[Unpack[Ts]] -# - We want it to *not* be treated as a TypeVar for the purposes of -# counting generic parameters, so that when we subscript a generic, -# the runtime doesn't try to substitute the Unpack with the subscripted type. -if not hasattr(typing, "TypeVarTuple"): - typing._collect_type_vars = _collect_type_vars - typing._check_generic = _check_generic - - -# Backport typing.NamedTuple as it exists in Python 3.13. -# In 3.11, the ability to define generic `NamedTuple`s was supported. -# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
-# On 3.12, we added __orig_bases__ to call-based NamedTuples -# On 3.13, we deprecated kwargs-based NamedTuples -if sys.version_info >= (3, 13): - NamedTuple = typing.NamedTuple -else: - def _make_nmtuple(name, types, module, defaults=()): - fields = [n for n, t in types] - annotations = {n: typing._type_check(t, f"field {n} annotation must be a type") - for n, t in types} - nm_tpl = collections.namedtuple(name, fields, - defaults=defaults, module=module) - nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations - # The `_field_types` attribute was removed in 3.9; - # in earlier versions, it is the same as the `__annotations__` attribute - if sys.version_info < (3, 9): - nm_tpl._field_types = annotations - return nm_tpl - - _prohibited_namedtuple_fields = typing._prohibited - _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'}) - - class _NamedTupleMeta(type): - def __new__(cls, typename, bases, ns): - assert _NamedTuple in bases - for base in bases: - if base is not _NamedTuple and base is not typing.Generic: - raise TypeError( - 'can only inherit from a NamedTuple type and Generic') - bases = tuple(tuple if base is _NamedTuple else base for base in bases) - types = ns.get('__annotations__', {}) - default_names = [] - for field_name in types: - if field_name in ns: - default_names.append(field_name) - elif default_names: - raise TypeError(f"Non-default namedtuple field {field_name} " - f"cannot follow default field" - f"{'s' if len(default_names) > 1 else ''} " - f"{', '.join(default_names)}") - nm_tpl = _make_nmtuple( - typename, types.items(), - defaults=[ns[n] for n in default_names], - module=ns['__module__'] - ) - nm_tpl.__bases__ = bases - if typing.Generic in bases: - if hasattr(typing, '_generic_class_getitem'): # 3.12+ - nm_tpl.__class_getitem__ = classmethod(typing._generic_class_getitem) - else: - class_getitem = typing.Generic.__class_getitem__.__func__ - nm_tpl.__class_getitem__ = classmethod(class_getitem) - # update from user namespace without overriding special namedtuple attributes - for key in ns: - if key in _prohibited_namedtuple_fields: - raise AttributeError("Cannot overwrite NamedTuple attribute " + key) - elif key not in _special_namedtuple_fields and key not in nm_tpl._fields: - setattr(nm_tpl, key, ns[key]) - if typing.Generic in bases: - nm_tpl.__init_subclass__() - return nm_tpl - - _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {}) - - def _namedtuple_mro_entries(bases): - assert NamedTuple in bases - return (_NamedTuple,) - - @_ensure_subclassable(_namedtuple_mro_entries) - def NamedTuple(typename, fields=_marker, /, **kwargs): - """Typed version of namedtuple. - - Usage:: - - class Employee(NamedTuple): - name: str - id: int - - This is equivalent to:: - - Employee = collections.namedtuple('Employee', ['name', 'id']) - - The resulting class has an extra __annotations__ attribute, giving a - dict that maps field names to types. (The field names are also in - the _fields attribute, which is part of the namedtuple API.) - An alternative equivalent functional syntax is also accepted:: - - Employee = NamedTuple('Employee', [('name', str), ('id', int)]) - """ - if fields is _marker: - if kwargs: - deprecated_thing = "Creating NamedTuple classes using keyword arguments" - deprecation_msg = ( - "{name} is deprecated and will be disallowed in Python {remove}. " - "Use the class-based or functional syntax instead." 
- ) - else: - deprecated_thing = "Failing to pass a value for the 'fields' parameter" - example = f"`{typename} = NamedTuple({typename!r}, [])`" - deprecation_msg = ( - "{name} is deprecated and will be disallowed in Python {remove}. " - "To create a NamedTuple class with 0 fields " - "using the functional syntax, " - "pass an empty list, e.g. " - ) + example + "." - elif fields is None: - if kwargs: - raise TypeError( - "Cannot pass `None` as the 'fields' parameter " - "and also specify fields using keyword arguments" - ) - else: - deprecated_thing = "Passing `None` as the 'fields' parameter" - example = f"`{typename} = NamedTuple({typename!r}, [])`" - deprecation_msg = ( - "{name} is deprecated and will be disallowed in Python {remove}. " - "To create a NamedTuple class with 0 fields " - "using the functional syntax, " - "pass an empty list, e.g. " - ) + example + "." - elif kwargs: - raise TypeError("Either list of fields or keywords" - " can be provided to NamedTuple, not both") - if fields is _marker or fields is None: - warnings.warn( - deprecation_msg.format(name=deprecated_thing, remove="3.15"), - DeprecationWarning, - stacklevel=2, - ) - fields = kwargs.items() - nt = _make_nmtuple(typename, fields, module=_caller()) - nt.__orig_bases__ = (NamedTuple,) - return nt - - -if hasattr(collections.abc, "Buffer"): - Buffer = collections.abc.Buffer -else: - class Buffer(abc.ABC): - """Base class for classes that implement the buffer protocol. - - The buffer protocol allows Python objects to expose a low-level - memory buffer interface. Before Python 3.12, it is not possible - to implement the buffer protocol in pure Python code, or even - to check whether a class implements the buffer protocol. In - Python 3.12 and higher, the ``__buffer__`` method allows access - to the buffer protocol from Python code, and the - ``collections.abc.Buffer`` ABC allows checking whether a class - implements the buffer protocol. - - To indicate support for the buffer protocol in earlier versions, - inherit from this ABC, either in a stub file or at runtime, - or use ABC registration. This ABC provides no methods, because - there is no Python-accessible methods shared by pre-3.12 buffer - classes. It is useful primarily for static checks. - - """ - - # As a courtesy, register the most common stdlib buffer classes. - Buffer.register(memoryview) - Buffer.register(bytearray) - Buffer.register(bytes) - - -# Backport of types.get_original_bases, available on 3.12+ in CPython -if hasattr(_types, "get_original_bases"): - get_original_bases = _types.get_original_bases -else: - def get_original_bases(cls, /): - """Return the class's "original" bases prior to modification by `__mro_entries__`. - - Examples:: - - from typing import TypeVar, Generic - from typing_extensions import NamedTuple, TypedDict - - T = TypeVar("T") - class Foo(Generic[T]): ... - class Bar(Foo[int], float): ... - class Baz(list[str]): ... 
- Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) - Spam = TypedDict("Spam", {"a": int, "b": str}) - - assert get_original_bases(Bar) == (Foo[int], float) - assert get_original_bases(Baz) == (list[str],) - assert get_original_bases(Eggs) == (NamedTuple,) - assert get_original_bases(Spam) == (TypedDict,) - assert get_original_bases(int) == (object,) - """ - try: - return cls.__dict__.get("__orig_bases__", cls.__bases__) - except AttributeError: - raise TypeError( - f'Expected an instance of type, not {type(cls).__name__!r}' - ) from None - - -# NewType is a class on Python 3.10+, making it pickleable -# The error message for subclassing instances of NewType was improved on 3.11+ -if sys.version_info >= (3, 11): - NewType = typing.NewType -else: - class NewType: - """NewType creates simple unique types with almost zero - runtime overhead. NewType(name, tp) is considered a subtype of tp - by static type checkers. At runtime, NewType(name, tp) returns - a dummy callable that simply returns its argument. Usage:: - UserId = NewType('UserId', int) - def name_by_id(user_id: UserId) -> str: - ... - UserId('user') # Fails type check - name_by_id(42) # Fails type check - name_by_id(UserId(42)) # OK - num = UserId(5) + 1 # type: int - """ - - def __call__(self, obj): - return obj - - def __init__(self, name, tp): - self.__qualname__ = name - if '.' in name: - name = name.rpartition('.')[-1] - self.__name__ = name - self.__supertype__ = tp - def_mod = _caller() - if def_mod != 'typing_extensions': - self.__module__ = def_mod - - def __mro_entries__(self, bases): - # We defined __mro_entries__ to get a better error message - # if a user attempts to subclass a NewType instance. bpo-46170 - supercls_name = self.__name__ - - class Dummy: - def __init_subclass__(cls): - subcls_name = cls.__name__ - raise TypeError( - f"Cannot subclass an instance of NewType. " - f"Perhaps you were looking for: " - f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" - ) - - return (Dummy,) - - def __repr__(self): - return f'{self.__module__}.{self.__qualname__}' - - def __reduce__(self): - return self.__qualname__ - - if sys.version_info >= (3, 10): - # PEP 604 methods - # It doesn't make sense to have these methods on Python <3.10 - - def __or__(self, other): - return typing.Union[self, other] - - def __ror__(self, other): - return typing.Union[other, self] - - -if hasattr(typing, "TypeAliasType"): - TypeAliasType = typing.TypeAliasType -else: - def _is_unionable(obj): - """Corresponds to is_unionable() in unionobject.c in CPython.""" - return obj is None or isinstance(obj, ( - type, - _types.GenericAlias, - _types.UnionType, - TypeAliasType, - )) - - class TypeAliasType: - """Create named, parameterized type aliases. - - This provides a backport of the new `type` statement in Python 3.12: - - type ListOrSet[T] = list[T] | set[T] - - is equivalent to: - - T = TypeVar("T") - ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) - - The name ListOrSet can then be used as an alias for the type it refers to. - - The type_params argument should contain all the type parameters used - in the value of the type alias. If the alias is not generic, this - argument is omitted. - - Static type checkers should only support type aliases declared using - TypeAliasType that follow these rules: - - - The first argument (the name) must be a string literal. - - The TypeAliasType instance must be immediately assigned to a variable - of the same name. 
(For example, 'X = TypeAliasType("Y", int)' is invalid, - as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). - - """ - - def __init__(self, name: str, value, *, type_params=()): - if not isinstance(name, str): - raise TypeError("TypeAliasType name must be a string") - self.__value__ = value - self.__type_params__ = type_params - - parameters = [] - for type_param in type_params: - if isinstance(type_param, TypeVarTuple): - parameters.extend(type_param) - else: - parameters.append(type_param) - self.__parameters__ = tuple(parameters) - def_mod = _caller() - if def_mod != 'typing_extensions': - self.__module__ = def_mod - # Setting this attribute closes the TypeAliasType from further modification - self.__name__ = name - - def __setattr__(self, name: str, value: object, /) -> None: - if hasattr(self, "__name__"): - self._raise_attribute_error(name) - super().__setattr__(name, value) - - def __delattr__(self, name: str, /) -> Never: - self._raise_attribute_error(name) - - def _raise_attribute_error(self, name: str) -> Never: - # Match the Python 3.12 error messages exactly - if name == "__name__": - raise AttributeError("readonly attribute") - elif name in {"__value__", "__type_params__", "__parameters__", "__module__"}: - raise AttributeError( - f"attribute '{name}' of 'typing.TypeAliasType' objects " - "is not writable" - ) - else: - raise AttributeError( - f"'typing.TypeAliasType' object has no attribute '{name}'" - ) - - def __repr__(self) -> str: - return self.__name__ - - def __getitem__(self, parameters): - if not isinstance(parameters, tuple): - parameters = (parameters,) - parameters = [ - typing._type_check( - item, f'Subscripting {self.__name__} requires a type.' - ) - for item in parameters - ] - return typing._GenericAlias(self, tuple(parameters)) - - def __reduce__(self): - return self.__name__ - - def __init_subclass__(cls, *args, **kwargs): - raise TypeError( - "type 'typing_extensions.TypeAliasType' is not an acceptable base type" - ) - - # The presence of this method convinces typing._type_check - # that TypeAliasTypes are types. - def __call__(self): - raise TypeError("Type alias is not callable") - - if sys.version_info >= (3, 10): - def __or__(self, right): - # For forward compatibility with 3.12, reject Unions - # that are not accepted by the built-in Union. - if not _is_unionable(right): - return NotImplemented - return typing.Union[self, right] - - def __ror__(self, left): - if not _is_unionable(left): - return NotImplemented - return typing.Union[left, self] - - -if hasattr(typing, "is_protocol"): - is_protocol = typing.is_protocol - get_protocol_members = typing.get_protocol_members -else: - def is_protocol(tp: type, /) -> bool: - """Return True if the given type is a Protocol. - - Example:: - - >>> from typing_extensions import Protocol, is_protocol - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> is_protocol(P) - True - >>> is_protocol(int) - False - """ - return ( - isinstance(tp, type) - and getattr(tp, '_is_protocol', False) - and tp is not Protocol - and tp is not typing.Protocol - ) - - def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: - """Return the set of members defined in a Protocol. - - Example:: - - >>> from typing_extensions import Protocol, get_protocol_members - >>> class P(Protocol): - ... def a(self) -> str: ... - ... b: int - >>> get_protocol_members(P) - frozenset({'a', 'b'}) - - Raise a TypeError for arguments that are not Protocols. 
- """ - if not is_protocol(tp): - raise TypeError(f'{tp!r} is not a Protocol') - if hasattr(tp, '__protocol_attrs__'): - return frozenset(tp.__protocol_attrs__) - return frozenset(_get_protocol_attrs(tp)) - - -if hasattr(typing, "Doc"): - Doc = typing.Doc -else: - class Doc: - """Define the documentation of a type annotation using ``Annotated``, to be - used in class attributes, function and method parameters, return values, - and variables. - - The value should be a positional-only string literal to allow static tools - like editors and documentation generators to use it. - - This complements docstrings. - - The string value passed is available in the attribute ``documentation``. - - Example:: - - >>> from typing_extensions import Annotated, Doc - >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... - """ - def __init__(self, documentation: str, /) -> None: - self.documentation = documentation - - def __repr__(self) -> str: - return f"Doc({self.documentation!r})" - - def __hash__(self) -> int: - return hash(self.documentation) - - def __eq__(self, other: object) -> bool: - if not isinstance(other, Doc): - return NotImplemented - return self.documentation == other.documentation - - -# Aliases for items that have always been in typing. -# Explicitly assign these (rather than using `from typing import *` at the top), -# so that we get a CI error if one of these is deleted from typing.py -# in a future version of Python -AbstractSet = typing.AbstractSet -AnyStr = typing.AnyStr -BinaryIO = typing.BinaryIO -Callable = typing.Callable -Collection = typing.Collection -Container = typing.Container -Dict = typing.Dict -ForwardRef = typing.ForwardRef -FrozenSet = typing.FrozenSet -Generator = typing.Generator -Generic = typing.Generic -Hashable = typing.Hashable -IO = typing.IO -ItemsView = typing.ItemsView -Iterable = typing.Iterable -Iterator = typing.Iterator -KeysView = typing.KeysView -List = typing.List -Mapping = typing.Mapping -MappingView = typing.MappingView -Match = typing.Match -MutableMapping = typing.MutableMapping -MutableSequence = typing.MutableSequence -MutableSet = typing.MutableSet -Optional = typing.Optional -Pattern = typing.Pattern -Reversible = typing.Reversible -Sequence = typing.Sequence -Set = typing.Set -Sized = typing.Sized -TextIO = typing.TextIO -Tuple = typing.Tuple -Union = typing.Union -ValuesView = typing.ValuesView -cast = typing.cast -no_type_check = typing.no_type_check -no_type_check_decorator = typing.no_type_check_decorator diff --git a/requirements-dev.in b/requirements-dev.in new file mode 100644 index 0000000..578ef1e --- /dev/null +++ b/requirements-dev.in @@ -0,0 +1,4 @@ +-r requirements.in + +mypy +ruff>=0.4 diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..fe1302f --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,10 @@ +# This file was autogenerated by uv via the following command: +# uv pip compile requirements-dev.in -o requirements-dev.txt +mypy==1.10.0 + # via -r requirements-dev.in +mypy-extensions==1.0.0 + # via mypy +ruff==0.4.4 + # via -r requirements-dev.in +typing-extensions==4.11.0 + # via mypy diff --git a/requirements.in b/requirements.in index 576967f..e69de29 100644 --- a/requirements.in +++ b/requirements.in @@ -1,2 +0,0 @@ -mypy -ruff>=0.4 diff --git a/requirements.txt b/requirements.txt index 28c4944..e9aaf47 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,2 @@ # This file was autogenerated by uv via the following command: # uv pip compile requirements.in -o 
requirements.txt -mypy==1.10.0 -mypy-extensions==1.0.0 - # via mypy -ruff==0.4.3 -typing-extensions==4.11.0 - # via mypy
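
The `Buffer` ABC whose backport is deleted above is described only in prose (inherit from the ABC, or use ABC registration), so a short illustrative sketch may help. This is an aside, not part of the patch: `RawFrame` is a made-up class, and the `typing_extensions` import on pre-3.12 interpreters assumes the PyPI distribution is available in place of the removed vendored copy.

import sys

if sys.version_info >= (3, 12):
    from collections.abc import Buffer
else:
    # Older interpreters need the backport; assumes the PyPI typing_extensions package.
    from typing_extensions import Buffer


class RawFrame:
    """Pure-Python wrapper around raw bytes (hypothetical example class)."""

    def __init__(self, raw: bytes) -> None:
        self.raw = raw


# Register instead of inheriting, as the Buffer docstring allows.
Buffer.register(RawFrame)

print(isinstance(memoryview(b"abc"), Buffer))  # True: common stdlib buffer types are covered
print(isinstance(RawFrame(b"abc"), Buffer))    # True: recognized via ABC registration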