From 5204eb37c0e7ee1d2f5a9efdf1f4713f50d88d14 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Thu, 10 Oct 2024 17:35:03 -0700 Subject: [PATCH 1/4] First round of pyupgrade --- pyanalyze/analysis_lib.py | 3 ++- pyanalyze/annotated_types.py | 3 ++- pyanalyze/annotations.py | 6 +---- pyanalyze/arg_spec.py | 6 ++--- pyanalyze/asynq_checker.py | 3 ++- pyanalyze/attributes.py | 3 ++- pyanalyze/checker.py | 15 ++---------- pyanalyze/error_code.py | 3 ++- pyanalyze/extensions.py | 9 ++++--- pyanalyze/find_unused.py | 3 ++- pyanalyze/format_strings.py | 24 +++++-------------- pyanalyze/functions.py | 3 ++- pyanalyze/implementation.py | 14 ++--------- pyanalyze/importer.py | 5 ++-- pyanalyze/name_check_visitor.py | 14 ++++------- pyanalyze/node_visitor.py | 15 ++---------- pyanalyze/options.py | 5 +--- pyanalyze/patma.py | 13 ++-------- pyanalyze/predicates.py | 3 ++- pyanalyze/runtime.py | 4 ++-- pyanalyze/safe.py | 14 ++--------- pyanalyze/signature.py | 4 +--- pyanalyze/stacked_scopes.py | 4 +--- .../stubs/_pyanalyze_tests-stubs/initnew.pyi | 3 ++- .../_pyanalyze_tests-stubs/paramspec.pyi | 4 +++- .../stubs/pyanalyze-stubs/extensions.pyi | 3 ++- pyanalyze/suggested_type.py | 3 ++- pyanalyze/tests.py | 3 ++- pyanalyze/type_evaluation.py | 15 ++---------- pyanalyze/type_object.py | 3 ++- pyanalyze/typeshed.py | 16 ++----------- pyanalyze/typevar.py | 3 ++- pyanalyze/value.py | 5 +--- pyanalyze/yield_checker.py | 14 ++--------- pyproject.toml | 6 +++++ 35 files changed, 79 insertions(+), 175 deletions(-) diff --git a/pyanalyze/analysis_lib.py b/pyanalyze/analysis_lib.py index 75a6dddc..4d0b5f8b 100644 --- a/pyanalyze/analysis_lib.py +++ b/pyanalyze/analysis_lib.py @@ -10,9 +10,10 @@ import secrets import sys import types +from collections.abc import Mapping from dataclasses import dataclass from pathlib import Path -from typing import Callable, List, Mapping, Optional, Set, Union +from typing import Callable, List, Optional, Set, Union def _all_files( diff --git 
a/pyanalyze/annotated_types.py b/pyanalyze/annotated_types.py index 13907050..2dde51f4 100644 --- a/pyanalyze/annotated_types.py +++ b/pyanalyze/annotated_types.py @@ -5,9 +5,10 @@ """ import enum +from collections.abc import Iterable from dataclasses import dataclass from datetime import datetime, timezone, tzinfo -from typing import Any, Callable, Iterable, Optional, Type, Union +from typing import Any, Callable, Optional, Type, Union from pyanalyze.value import CanAssign, CanAssignContext, Value, flatten_values diff --git a/pyanalyze/annotations.py b/pyanalyze/annotations.py index 6e72fcb7..5b2e4ff7 100644 --- a/pyanalyze/annotations.py +++ b/pyanalyze/annotations.py @@ -28,19 +28,15 @@ import builtins import contextlib import typing -from collections.abc import Callable, Hashable +from collections.abc import Callable, Container, Generator, Hashable, Mapping, Sequence from dataclasses import InitVar, dataclass, field from typing import ( TYPE_CHECKING, Any, - Container, ContextManager, - Generator, List, - Mapping, NewType, Optional, - Sequence, Set, Tuple, TypeVar, diff --git a/pyanalyze/arg_spec.py b/pyanalyze/arg_spec.py index 92d857ed..514d298b 100644 --- a/pyanalyze/arg_spec.py +++ b/pyanalyze/arg_spec.py @@ -13,19 +13,17 @@ import sys import textwrap import typing +from collections.abc import Iterator, Mapping, Sequence from dataclasses import dataclass, replace +from re import Pattern from types import FunctionType, MethodType, ModuleType from typing import ( Any, Callable, Dict, Generic, - Iterator, List, - Mapping, Optional, - Pattern, - Sequence, Tuple, Type, TypeVar, diff --git a/pyanalyze/asynq_checker.py b/pyanalyze/asynq_checker.py index a0506920..e265ac79 100644 --- a/pyanalyze/asynq_checker.py +++ b/pyanalyze/asynq_checker.py @@ -8,8 +8,9 @@ import contextlib import inspect import types +from collections.abc import Iterator from dataclasses import dataclass, field -from typing import Any, Callable, Iterator, Optional +from typing import Any, 
Callable, Optional import asynq import qcore diff --git a/pyanalyze/attributes.py b/pyanalyze/attributes.py index 55b50226..47f5c700 100644 --- a/pyanalyze/attributes.py +++ b/pyanalyze/attributes.py @@ -8,9 +8,10 @@ import inspect import sys import types +from collections.abc import Sequence from dataclasses import dataclass from enum import Enum -from typing import Any, Callable, ClassVar, Optional, Sequence, Tuple, Union +from typing import Any, Callable, ClassVar, Optional, Tuple, Union import asynq import qcore diff --git a/pyanalyze/checker.py b/pyanalyze/checker.py index 2e40331b..cefda01a 100644 --- a/pyanalyze/checker.py +++ b/pyanalyze/checker.py @@ -8,21 +8,10 @@ import itertools import sys import types +from collections.abc import Iterable, Iterator, Sequence from contextlib import contextmanager from dataclasses import InitVar, dataclass, field -from typing import ( - Callable, - ContextManager, - Dict, - Iterable, - Iterator, - List, - Optional, - Sequence, - Set, - Tuple, - Union, -) +from typing import Callable, ContextManager, Dict, List, Optional, Set, Tuple, Union import qcore diff --git a/pyanalyze/error_code.py b/pyanalyze/error_code.py index 7d8a649a..39f5d520 100644 --- a/pyanalyze/error_code.py +++ b/pyanalyze/error_code.py @@ -4,8 +4,9 @@ """ +from collections.abc import Iterable, Iterator from dataclasses import dataclass -from typing import Dict, Iterable, Iterator +from typing import Dict import pyanalyze diff --git a/pyanalyze/extensions.py b/pyanalyze/extensions.py index 92f37db8..faf06696 100644 --- a/pyanalyze/extensions.py +++ b/pyanalyze/extensions.py @@ -12,19 +12,18 @@ import enum import typing from collections import defaultdict +from collections.abc import Container, Iterable, Iterator, Sequence from contextlib import contextmanager from dataclasses import dataclass, field from typing import ( TYPE_CHECKING, + Annotated, Any, Callable, - Container, Dict, - Iterable, - Iterator, List, + NoReturn, Optional, - Sequence, Tuple, 
Type, TypeVar, @@ -33,7 +32,7 @@ from typing import overload as real_overload import typing_extensions -from typing_extensions import Annotated, Literal, NoReturn +from typing_extensions import Literal import pyanalyze diff --git a/pyanalyze/find_unused.py b/pyanalyze/find_unused.py index eecba425..84505654 100644 --- a/pyanalyze/find_unused.py +++ b/pyanalyze/find_unused.py @@ -9,9 +9,10 @@ import enum import inspect from collections import defaultdict +from collections.abc import Iterable from dataclasses import dataclass, field from types import ModuleType, TracebackType -from typing import Dict, Iterable, List, Optional, Set, Type, TypeVar +from typing import Dict, List, Optional, Set, Type, TypeVar import qcore diff --git a/pyanalyze/format_strings.py b/pyanalyze/format_strings.py index d4a18240..afede483 100644 --- a/pyanalyze/format_strings.py +++ b/pyanalyze/format_strings.py @@ -8,20 +8,12 @@ import enum import re from collections import defaultdict +from collections.abc import Iterable, Sequence from dataclasses import dataclass, field -from typing import ( - Callable, - Dict, - Iterable, - List, - Match, - Optional, - Sequence, - Tuple, - Union, -) +from re import Match +from typing import Callable, Dict, List, Optional, Tuple, Union, runtime_checkable -from typing_extensions import Literal, Protocol, runtime_checkable +from typing_extensions import Literal, Protocol from .error_code import ErrorCode from .value import ( @@ -380,13 +372,9 @@ def accept_tuple_args_no_mvv( num_args = len(all_args) num_specifiers = len(specifiers) if num_args < num_specifiers: - yield "too few arguments to format string: got {} but expected {}".format( - num_args, num_specifiers - ) + yield f"too few arguments to format string: got {num_args} but expected {num_specifiers}" elif num_args > num_specifiers: - yield "too many arguments to format string: got {} but expected {}".format( - num_args, num_specifiers - ) + yield f"too many arguments to format string: got {num_args} 
but expected {num_specifiers}" else: for arg, specifier in zip(all_args, specifiers): yield from specifier.accept(arg, ctx) diff --git a/pyanalyze/functions.py b/pyanalyze/functions.py index c3826b26..990d37b5 100644 --- a/pyanalyze/functions.py +++ b/pyanalyze/functions.py @@ -10,9 +10,10 @@ import enum import sys import types +from collections.abc import Iterable, Sequence from dataclasses import dataclass, replace from itertools import zip_longest -from typing import Iterable, List, Optional, Sequence, Tuple, TypeVar, Union +from typing import List, Optional, Tuple, TypeVar, Union import asynq from typing_extensions import Protocol diff --git a/pyanalyze/implementation.py b/pyanalyze/implementation.py index fd6cd4f9..0d8c1465 100644 --- a/pyanalyze/implementation.py +++ b/pyanalyze/implementation.py @@ -4,19 +4,9 @@ import inspect import re import typing +from collections.abc import Iterable, Sequence from itertools import product -from typing import ( - Callable, - Dict, - Iterable, - NewType, - Optional, - Sequence, - Type, - TypeVar, - Union, - cast, -) +from typing import Callable, Dict, NewType, Optional, Type, TypeVar, Union, cast import qcore import typing_extensions diff --git a/pyanalyze/importer.py b/pyanalyze/importer.py index 28a92d21..2f5eefda 100644 --- a/pyanalyze/importer.py +++ b/pyanalyze/importer.py @@ -7,13 +7,14 @@ import importlib import importlib.util import sys +from collections.abc import Sequence from functools import lru_cache from pathlib import Path from types import ModuleType -from typing import Optional, Sequence, Tuple, cast +from typing import Optional, Tuple, cast -@lru_cache() +@lru_cache def directory_has_init(path: Path) -> bool: return (path / "__init__.py").exists() diff --git a/pyanalyze/name_check_visitor.py b/pyanalyze/name_check_visitor.py index ed14d246..88f8b6bf 100644 --- a/pyanalyze/name_check_visitor.py +++ b/pyanalyze/name_check_visitor.py @@ -27,23 +27,19 @@ import typing from abc import abstractmethod from 
argparse import ArgumentParser +from collections.abc import Container, Generator, Iterable, Iterator, Mapping, Sequence from dataclasses import dataclass from itertools import chain from pathlib import Path from typing import ( + Annotated, Any, Callable, ClassVar, - Container, ContextManager, Dict, - Generator, - Iterable, - Iterator, List, - Mapping, Optional, - Sequence, Set, Tuple, Type, @@ -56,7 +52,7 @@ import qcore import typeshed_client from qcore.testing import Anything -from typing_extensions import Annotated, Protocol, get_args, get_origin +from typing_extensions import Protocol, get_args, get_origin from . import attributes, format_strings, importer, node_visitor, type_evaluation from .analysis_lib import get_attribute_path @@ -2622,9 +2618,7 @@ def visit_Nonlocal(self, node: ast.Nonlocal) -> None: # this is a SyntaxError, so it might be impossible to reach this branch self._show_error_if_checking( node, - "nonlocal name {} does not exist in any enclosing scope".format( - name - ), + f"nonlocal name {name} does not exist in any enclosing scope", error_code=ErrorCode.bad_nonlocal, ) defining_scope = self.scopes.module_scope() diff --git a/pyanalyze/node_visitor.py b/pyanalyze/node_visitor.py index 07518fb5..96f24783 100644 --- a/pyanalyze/node_visitor.py +++ b/pyanalyze/node_visitor.py @@ -19,24 +19,13 @@ import sys import tempfile from builtins import print as real_print +from collections.abc import Iterable, Iterator, Mapping, Sequence from contextlib import contextmanager from dataclasses import dataclass from enum import Enum from pathlib import Path from types import ModuleType -from typing import ( - Any, - Dict, - Iterable, - Iterator, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, -) +from typing import Any, Dict, List, Optional, Tuple, Type, Union import codemod import qcore diff --git a/pyanalyze/options.py b/pyanalyze/options.py index 37dfaeb7..0fd27a4a 100644 --- a/pyanalyze/options.py +++ b/pyanalyze/options.py @@ -9,19 
+9,16 @@ import pathlib import sys from collections import defaultdict +from collections.abc import Collection, Iterable, Mapping, Sequence from dataclasses import dataclass from pathlib import Path from typing import ( Any, ClassVar, - Collection, Dict, FrozenSet, Generic, - Iterable, - Mapping, Optional, - Sequence, Set, Tuple, Type, diff --git a/pyanalyze/patma.py b/pyanalyze/patma.py index 9d4a5473..20662a9e 100644 --- a/pyanalyze/patma.py +++ b/pyanalyze/patma.py @@ -8,18 +8,9 @@ import collections.abc import enum import itertools +from collections.abc import Container, Sequence from dataclasses import dataclass, replace -from typing import ( - Any, - Callable, - Container, - Optional, - Sequence, - Set, - Tuple, - TypeVar, - Union, -) +from typing import Any, Callable, Optional, Set, Tuple, TypeVar, Union import qcore diff --git a/pyanalyze/predicates.py b/pyanalyze/predicates.py index fc11f19f..f1e05040 100644 --- a/pyanalyze/predicates.py +++ b/pyanalyze/predicates.py @@ -6,8 +6,9 @@ import enum import operator +from collections.abc import Sequence from dataclasses import dataclass -from typing import Optional, Sequence +from typing import Optional from .safe import safe_issubclass from .value import ( diff --git a/pyanalyze/runtime.py b/pyanalyze/runtime.py index 22e5d0c8..e666b622 100644 --- a/pyanalyze/runtime.py +++ b/pyanalyze/runtime.py @@ -4,7 +4,7 @@ """ -from functools import lru_cache +from functools import cache from typing import Optional from typing_extensions import deprecated @@ -16,7 +16,7 @@ from .value import CanAssignError, KnownValue -@lru_cache(maxsize=None) +@cache def _get_checker() -> "pyanalyze.checker.Checker": return pyanalyze.checker.Checker() diff --git a/pyanalyze/safe.py b/pyanalyze/safe.py index 40273a4f..3a5911ea 100644 --- a/pyanalyze/safe.py +++ b/pyanalyze/safe.py @@ -7,18 +7,8 @@ import inspect import sys import typing -from typing import ( - Any, - Container, - Dict, - NewType, - Optional, - Sequence, - Tuple, - Type, - 
TypeVar, - Union, -) +from collections.abc import Container, Sequence +from typing import Any, Dict, NewType, Optional, Tuple, Type, TypeVar, Union import typing_extensions diff --git a/pyanalyze/signature.py b/pyanalyze/signature.py index 962675ea..46dc270e 100644 --- a/pyanalyze/signature.py +++ b/pyanalyze/signature.py @@ -11,6 +11,7 @@ import enum import inspect import itertools +from collections.abc import Container, Iterable, Sequence from dataclasses import dataclass, field, replace from types import FunctionType, MethodType from typing import ( @@ -18,13 +19,10 @@ Any, Callable, ClassVar, - Container, Dict, - Iterable, List, NamedTuple, Optional, - Sequence, Set, Tuple, TypeVar, diff --git a/pyanalyze/stacked_scopes.py b/pyanalyze/stacked_scopes.py index da808c03..adc46f70 100644 --- a/pyanalyze/stacked_scopes.py +++ b/pyanalyze/stacked_scopes.py @@ -25,6 +25,7 @@ import enum from ast import AST from collections import OrderedDict, defaultdict +from collections.abc import Iterable, Iterator, Sequence from dataclasses import dataclass, field, replace from itertools import chain from types import ModuleType @@ -34,12 +35,9 @@ ContextManager, Dict, FrozenSet, - Iterable, - Iterator, List, NamedTuple, Optional, - Sequence, Set, Tuple, Type, diff --git a/pyanalyze/stubs/_pyanalyze_tests-stubs/initnew.pyi b/pyanalyze/stubs/_pyanalyze_tests-stubs/initnew.pyi index fbcacd63..b9a03b43 100644 --- a/pyanalyze/stubs/_pyanalyze_tests-stubs/initnew.pyi +++ b/pyanalyze/stubs/_pyanalyze_tests-stubs/initnew.pyi @@ -1,4 +1,5 @@ -from typing import Generic, Iterable, Iterator, TypeVar, overload +from collections.abc import Iterable, Iterator +from typing import Generic, TypeVar, overload _T = TypeVar("_T") diff --git a/pyanalyze/stubs/_pyanalyze_tests-stubs/paramspec.pyi b/pyanalyze/stubs/_pyanalyze_tests-stubs/paramspec.pyi index 2027ba17..f826d890 100644 --- a/pyanalyze/stubs/_pyanalyze_tests-stubs/paramspec.pyi +++ b/pyanalyze/stubs/_pyanalyze_tests-stubs/paramspec.pyi @@ 
-1,4 +1,6 @@ -from typing_extensions import Callable, ParamSpec, TypeVar +from typing import Callable + +from typing_extensions import ParamSpec, TypeVar T = TypeVar("T") P = ParamSpec("P") diff --git a/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi b/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi index 3f7431a5..172e7b2f 100644 --- a/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi +++ b/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi @@ -2,7 +2,8 @@ # from it, because typeshed_client doesn't let # stubs import from non-stub files. -from typing import Any, Callable, List, Optional, Sequence +from collections.abc import Sequence +from typing import Any, Callable, List, Optional def reveal_type(value: object) -> None: ... def get_overloads(fully_qualified_name: str) -> List[Callable[..., Any]]: ... diff --git a/pyanalyze/suggested_type.py b/pyanalyze/suggested_type.py index 9ae5f7c2..823cc108 100644 --- a/pyanalyze/suggested_type.py +++ b/pyanalyze/suggested_type.py @@ -6,9 +6,10 @@ import ast from collections import defaultdict +from collections.abc import Iterator, Mapping, Sequence from dataclasses import dataclass, field from types import FunctionType -from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union from .error_code import ErrorCode from .node_visitor import ErrorContext, Failure diff --git a/pyanalyze/tests.py b/pyanalyze/tests.py index aea71abe..23ee8971 100644 --- a/pyanalyze/tests.py +++ b/pyanalyze/tests.py @@ -5,7 +5,8 @@ """ -from typing import ClassVar, NoReturn, Sequence, Union, overload +from collections.abc import Sequence +from typing import ClassVar, NoReturn, Union, overload import qcore from asynq import AsyncTask, ConstFuture, async_proxy, asynq, get_async_fn, result diff --git a/pyanalyze/type_evaluation.py b/pyanalyze/type_evaluation.py index 37cc072e..4cea92d3 100644 --- a/pyanalyze/type_evaluation.py +++ b/pyanalyze/type_evaluation.py @@ -8,21 
+8,10 @@ import contextlib import operator import sys +from collections.abc import Iterator, Mapping, Sequence from contextlib import contextmanager from dataclasses import dataclass, field -from typing import ( - Any, - Callable, - Dict, - Iterator, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, -) +from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union import qcore from typing_extensions import Literal diff --git a/pyanalyze/type_object.py b/pyanalyze/type_object.py index 919a8d9a..a0359507 100644 --- a/pyanalyze/type_object.py +++ b/pyanalyze/type_object.py @@ -6,8 +6,9 @@ import collections.abc import inspect +from collections.abc import Container, Sequence from dataclasses import dataclass, field -from typing import Callable, Container, Dict, Sequence, Set, Union, cast +from typing import Callable, Dict, Set, Union, cast from unittest import mock from pyanalyze.signature import ( diff --git a/pyanalyze/typeshed.py b/pyanalyze/typeshed.py index b5cd1e42..540735df 100644 --- a/pyanalyze/typeshed.py +++ b/pyanalyze/typeshed.py @@ -12,24 +12,12 @@ import sys import types from abc import abstractmethod -from collections.abc import Collection, MutableMapping +from collections.abc import Collection, Iterable, MutableMapping, Sequence from collections.abc import Set as AbstractSet from dataclasses import dataclass, field, replace from enum import EnumMeta from types import GeneratorType, MethodDescriptorType, ModuleType -from typing import ( - Any, - Dict, - Generic, - Iterable, - List, - Optional, - Sequence, - Set, - Tuple, - TypeVar, - Union, -) +from typing import Any, Dict, Generic, List, Optional, Set, Tuple, TypeVar, Union import qcore import typeshed_client diff --git a/pyanalyze/typevar.py b/pyanalyze/typevar.py index a1309b79..2664cb09 100644 --- a/pyanalyze/typevar.py +++ b/pyanalyze/typevar.py @@ -4,7 +4,8 @@ """ -from typing import Iterable, Sequence, Tuple, Union +from collections.abc import Iterable, Sequence 
+from typing import Tuple, Union import qcore diff --git a/pyanalyze/value.py b/pyanalyze/value.py index 82d4c185..a9e64dcc 100644 --- a/pyanalyze/value.py +++ b/pyanalyze/value.py @@ -24,6 +24,7 @@ def function(x: int, y: list[int], z: Any): import sys import textwrap from collections import deque +from collections.abc import Iterable, Iterator, Mapping, Sequence from dataclasses import InitVar, dataclass, field from itertools import chain from types import FunctionType, ModuleType @@ -32,12 +33,8 @@ def function(x: int, y: list[int], z: Any): Callable, ContextManager, Dict, - Iterable, - Iterator, List, - Mapping, Optional, - Sequence, Set, Tuple, Type, diff --git a/pyanalyze/yield_checker.py b/pyanalyze/yield_checker.py index 99dcd4ca..66687803 100644 --- a/pyanalyze/yield_checker.py +++ b/pyanalyze/yield_checker.py @@ -13,19 +13,9 @@ import contextlib import itertools import logging +from collections.abc import Iterator, Sequence from dataclasses import dataclass, field -from typing import ( - Any, - Callable, - ContextManager, - Dict, - Iterator, - List, - Optional, - Sequence, - Set, - Tuple, -) +from typing import Any, Callable, ContextManager, Dict, List, Optional, Set, Tuple import asynq import qcore diff --git a/pyproject.toml b/pyproject.toml index 36c2109b..df4ea726 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,6 +51,7 @@ select = [ "F", "E", "I", # import sorting + "UP", ] ignore = [ @@ -70,3 +71,8 @@ ignore = [ "E731", # do not assign a lambda expression, use a def "E741", # ambiguous variable name ] + +[tool.ruff.lint.per-file-ignores] +"pyanalyze/test_*.py" = [ + "UP", # Want to test old-style code +] From 6b7b3970f25214f6187d16ef416870e23390d4a6 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Thu, 10 Oct 2024 17:37:16 -0700 Subject: [PATCH 2/4] Unsafe fixes --- pyanalyze/analysis_lib.py | 12 +- pyanalyze/annotated_types.py | 4 +- pyanalyze/annotations.py | 15 +-- pyanalyze/arg_spec.py | 23 +--- pyanalyze/ast_annotator.py | 8 +- 
pyanalyze/asynq_checker.py | 5 +- pyanalyze/attributes.py | 8 +- pyanalyze/checker.py | 26 ++--- pyanalyze/error_code.py | 3 +- pyanalyze/extensions.py | 22 ++-- pyanalyze/find_unused.py | 14 +-- pyanalyze/format_strings.py | 14 +-- pyanalyze/functions.py | 8 +- pyanalyze/implementation.py | 17 +-- pyanalyze/importer.py | 4 +- pyanalyze/name_check_visitor.py | 110 ++++++++---------- pyanalyze/node_visitor.py | 79 ++++++------- pyanalyze/options.py | 45 +++---- pyanalyze/patma.py | 12 +- pyanalyze/reexport.py | 8 +- pyanalyze/safe.py | 12 +- pyanalyze/signature.py | 60 +++++----- pyanalyze/stacked_scopes.py | 80 ++++++------- .../stubs/_pyanalyze_tests-stubs/args.pyi | 4 +- .../_pyanalyze_tests-stubs/recursion.pyi | 4 +- .../stubs/pyanalyze-stubs/extensions.pyi | 6 +- pyanalyze/suggested_type.py | 16 +-- pyanalyze/type_evaluation.py | 34 +++--- pyanalyze/type_object.py | 10 +- pyanalyze/typeshed.py | 32 ++--- pyanalyze/typevar.py | 4 +- pyanalyze/value.py | 78 ++++++------- pyanalyze/yield_checker.py | 22 ++-- 33 files changed, 357 insertions(+), 442 deletions(-) diff --git a/pyanalyze/analysis_lib.py b/pyanalyze/analysis_lib.py index 4d0b5f8b..314fb995 100644 --- a/pyanalyze/analysis_lib.py +++ b/pyanalyze/analysis_lib.py @@ -13,12 +13,12 @@ from collections.abc import Mapping from dataclasses import dataclass from pathlib import Path -from typing import Callable, List, Optional, Set, Union +from typing import Callable, Optional, Union def _all_files( root: Union[str, Path], filter_function: Optional[Callable[[str], bool]] = None -) -> Set[str]: +) -> set[str]: """Returns the set of all files at the given root. Filtered optionally by the filter_function. @@ -35,7 +35,7 @@ def _all_files( def files_with_extension_from_directory( extension: str, dirname: Union[str, Path] -) -> Set[str]: +) -> set[str]: """Finds all files in a given directory with this extension.""" return _all_files(dirname, filter_function=lambda fn: fn.endswith("." 
+ extension)) @@ -49,8 +49,8 @@ def get_indentation(line: str) -> int: def get_line_range_for_node( - node: Union[ast.stmt, ast.expr], lines: List[str] -) -> List[int]: + node: Union[ast.stmt, ast.expr], lines: list[str] +) -> list[int]: """Returns the lines taken up by a Python ast node. lines is a list of code lines for the file the node came from. @@ -137,7 +137,7 @@ def is_positional_only_arg_name(name: str, class_name: Optional[str] = None) -> return name.startswith("__") and not name.endswith("__") -def get_attribute_path(node: ast.AST) -> Optional[List[str]]: +def get_attribute_path(node: ast.AST) -> Optional[list[str]]: """Gets the full path of an attribute lookup. For example, the code string "a.model.question.Question" will resolve to the path diff --git a/pyanalyze/annotated_types.py b/pyanalyze/annotated_types.py index 2dde51f4..b263bf59 100644 --- a/pyanalyze/annotated_types.py +++ b/pyanalyze/annotated_types.py @@ -8,7 +8,7 @@ from collections.abc import Iterable from dataclasses import dataclass from datetime import datetime, timezone, tzinfo -from typing import Any, Callable, Optional, Type, Union +from typing import Any, Callable, Optional, Union from pyanalyze.value import CanAssign, CanAssignContext, Value, flatten_values @@ -127,7 +127,7 @@ def can_assign_non_literal(self, value: Value) -> CanAssign: @dataclass(frozen=True) class EnumName(AnnotatedTypesCheck): - enum_cls: Type[enum.Enum] + enum_cls: type[enum.Enum] def predicate(self, value: str) -> bool: try: diff --git a/pyanalyze/annotations.py b/pyanalyze/annotations.py index 5b2e4ff7..54455fd3 100644 --- a/pyanalyze/annotations.py +++ b/pyanalyze/annotations.py @@ -34,11 +34,8 @@ TYPE_CHECKING, Any, ContextManager, - List, NewType, Optional, - Set, - Tuple, TypeVar, Union, cast, @@ -140,7 +137,7 @@ class Context: should_suppress_undefined_names: bool = field(default=False, init=False) """While this is True, no errors are shown for undefined names.""" - _being_evaluated: Set[int] = 
field(default_factory=set, init=False) + _being_evaluated: set[int] = field(default_factory=set, init=False) def suppress_undefined_names(self) -> ContextManager[None]: """Temporarily suppress errors about undefined names.""" @@ -795,7 +792,7 @@ def _type_from_subscripted_value( ctx.show_error("Optional[] takes only one argument") return AnyValue(AnySource.error) return unite_values(KnownValue(None), _type_from_value(members[0], ctx)) - elif root is typing.Type or root is type: + elif root is type: if len(members) != 1: ctx.show_error("Type[] takes only one argument") return AnyValue(AnySource.error) @@ -957,7 +954,7 @@ def get_type_alias( @dataclass(frozen=True) class _SubscriptedValue(Value): root: Optional[Value] - members: Tuple[Value, ...] + members: tuple[Value, ...] @dataclass(frozen=True) @@ -969,7 +966,7 @@ class TypeQualifierValue(Value): @dataclass(frozen=True) class DecoratorValue(Value): decorator: object - args: Tuple[Value, ...] + args: tuple[Value, ...] class _Visitor(ast.NodeVisitor): @@ -1160,7 +1157,7 @@ def _value_of_origin_args( is_typeddict: bool = False, allow_unpack: bool = False, ) -> Value: - if origin is typing.Type or origin is type: + if origin is type: if not args: return TypedValue(type) return SubclassValue.make(_type_from_runtime(args[0], ctx)) @@ -1385,7 +1382,7 @@ def _make_callable_from_value def _make_annotated(origin: Value, metadata: Sequence[Value], ctx: Context) -> Value: - metadata_objs: List[Union[Value, Extension]] = [] + metadata_objs: list[Union[Value, Extension]] = [] for entry in metadata: if isinstance(entry, KnownValue): if isinstance(entry.val, ParameterTypeGuard): diff --git a/pyanalyze/arg_spec.py b/pyanalyze/arg_spec.py index 514d298b..7c97a785 100644 --- a/pyanalyze/arg_spec.py +++ b/pyanalyze/arg_spec.py @@ -17,18 +17,7 @@ from dataclasses import dataclass, replace from re import Pattern from types import FunctionType, MethodType, ModuleType -from typing import ( - Any, - 
Callable, - Dict, - Generic, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) +from typing import Any, Callable, Generic, Optional, TypeVar, Union from unittest import mock import asynq @@ -344,7 +333,7 @@ def unwrap(cls, typ: type, options: Options) -> type: # specify the overloads. This will be unnecessary in 3.11+ # where we get to use typing.get_overloads(). def _raises_overload1( - expected_exception: Union[Type[_E], Tuple[Type[_E], ...]], + expected_exception: Union[type[_E], tuple[type[_E], ...]], *, match: Optional[Union[str, Pattern[str]]] = ..., ) -> _pytest.python_api.RaisesContext[_E]: @@ -352,7 +341,7 @@ def _raises_overload1( # TODO use ParamSpec here def _raises_overload2( - expected_exception: Union[Type[_E], Tuple[Type[_E], ...]], + expected_exception: Union[type[_E], tuple[type[_E], ...]], func: Callable[..., Any], *args: Any, **kwargs: Any, @@ -361,7 +350,7 @@ def _raises_overload2( def _get_pytest_signatures( arg_spec_cache: "ArgSpecCache", - ) -> Dict[object, ConcreteSignature]: + ) -> dict[object, ConcreteSignature]: """Return hardcoded Signatures for specific pytest functions.""" sigs = [ arg_spec_cache.get_concrete_signature(_raises_overload1), @@ -484,7 +473,7 @@ def _make_sig_parameter( is_wrapped: bool, index: int, seen_paramspec_args: Optional[ParamSpecArgsValue], - ) -> Tuple[Optional[SigParameter], bool, Optional[ParamSpecArgsValue]]: + ) -> tuple[Optional[SigParameter], bool, Optional[ParamSpecArgsValue]]: """Given an inspect.Parameter, returns a Parameter object.""" if is_wrapped: typ = AnyValue(AnySource.inference) @@ -1016,7 +1005,7 @@ def _safe_get_signature(self, obj: Any) -> Optional[inspect.Signature]: # Python 2. 
return None - def get_type_parameters(self, typ: Union[type, str]) -> List[Value]: + def get_type_parameters(self, typ: Union[type, str]) -> list[Value]: bases = self.get_generic_bases(typ, substitute_typevars=False) tv_map = bases.get(typ, {}) return [tv for tv in tv_map.values()] diff --git a/pyanalyze/ast_annotator.py b/pyanalyze/ast_annotator.py index 3f433e05..4d83047b 100644 --- a/pyanalyze/ast_annotator.py +++ b/pyanalyze/ast_annotator.py @@ -13,7 +13,7 @@ import textwrap import traceback import types -from typing import Optional, Type, Union +from typing import Optional, Union from .analysis_lib import make_module from .error_code import ErrorCode @@ -26,7 +26,7 @@ def annotate_code( code: str, *, - visitor_cls: Type[NameCheckVisitor] = NameCheckVisitor, + visitor_cls: type[NameCheckVisitor] = NameCheckVisitor, dump: bool = False, show_errors: bool = False, verbose: bool = False, @@ -73,7 +73,7 @@ def annotate_code( def annotate_file( path: Union[str, "os.PathLike[str]"], *, - visitor_cls: Type[NameCheckVisitor] = NameCheckVisitor, + visitor_cls: type[NameCheckVisitor] = NameCheckVisitor, verbose: bool = False, dump: bool = False, show_errors: bool = False, @@ -159,7 +159,7 @@ def _annotate_module( module: Optional[types.ModuleType], tree: ast.Module, code_str: str, - visitor_cls: Type[NameCheckVisitor], + visitor_cls: type[NameCheckVisitor], show_errors: bool = False, ) -> None: """Annotate the AST for a module with inferred values. 
diff --git a/pyanalyze/asynq_checker.py b/pyanalyze/asynq_checker.py index e265ac79..5849fc43 100644 --- a/pyanalyze/asynq_checker.py +++ b/pyanalyze/asynq_checker.py @@ -119,8 +119,7 @@ def check_call(self, value: Value, node: ast.Call) -> None: replacement_node = None self._show_impure_async_error( node, - replacement_call="%s.%s_async" - % (_stringify_obj(inner_type), value.attr_name), + replacement_call=f"{_stringify_obj(inner_type)}.{value.attr_name}_async", replacement_node=replacement_node, ) @@ -233,7 +232,7 @@ def _stringify_obj(obj: Any) -> str: return f"{_stringify_obj(cls)}.{obj.decorator.fn.__name__}" elif isinstance(obj, super): # self might not always be correct, but it's close enough - return "super(%s, self)" % _stringify_obj(obj.__self_class__) + return f"super({_stringify_obj(obj.__self_class__)}, self)" else: return f"{obj.__module__}.{obj.__name__}" diff --git a/pyanalyze/attributes.py b/pyanalyze/attributes.py index 47f5c700..80d4b9df 100644 --- a/pyanalyze/attributes.py +++ b/pyanalyze/attributes.py @@ -11,7 +11,7 @@ from collections.abc import Sequence from dataclasses import dataclass from enum import Enum -from typing import Any, Callable, ClassVar, Optional, Tuple, Union +from typing import Any, Callable, ClassVar, Optional, Union import asynq import qcore @@ -82,7 +82,7 @@ def get_attribute_from_typeshed(self, typ: type, *, on_class: bool) -> Value: def get_attribute_from_typeshed_recursively( self, fq_name: str, *, on_class: bool - ) -> Tuple[Value, object]: + ) -> tuple[Value, object]: return UNINITIALIZED_VALUE, None def should_ignore_none_attributes(self) -> bool: @@ -211,7 +211,7 @@ def should_treat_as_any(cls, val: object, options: Options) -> bool: return any(func(val) for func in option_value) -_CAT = Callable[[object], Optional[Tuple[Value, Value]]] +_CAT = Callable[[object], Optional[tuple[Value, Value]]] class ClassAttributeTransformer(PyObjectSequenceOption[_CAT]): @@ -503,7 +503,7 @@ def get_signature(self, callable: object) 
-> MaybeSignature: def _get_attribute_from_mro( typ: object, ctx: AttrContext, on_class: bool -) -> Tuple[Value, object, bool]: +) -> tuple[Value, object, bool]: # Then go through the MRO and find base classes that may define the attribute. if safe_isinstance(typ, type) and safe_issubclass(typ, Enum): # Special case, to avoid picking an attribute of Enum instances (e.g., name) diff --git a/pyanalyze/checker.py b/pyanalyze/checker.py index cefda01a..837ae612 100644 --- a/pyanalyze/checker.py +++ b/pyanalyze/checker.py @@ -11,7 +11,7 @@ from collections.abc import Iterable, Iterator, Sequence from contextlib import contextmanager from dataclasses import InitVar, dataclass, field -from typing import Callable, ContextManager, Dict, List, Optional, Set, Tuple, Union +from typing import Callable, ContextManager, Optional, Union import qcore @@ -55,7 +55,7 @@ unite_values, ) -_BaseProvider = Callable[[Union[type, super]], Set[type]] +_BaseProvider = Callable[[Union[type, super]], set[type]] class AdditionalBaseProviders(PyObjectSequenceOption[_BaseProvider]): @@ -84,14 +84,14 @@ class Checker: ts_finder: TypeshedFinder = field(init=False) reexport_tracker: ImplicitReexportTracker = field(init=False) callable_tracker: CallableTracker = field(init=False) - type_object_cache: Dict[Union[type, super, str], TypeObject] = field( + type_object_cache: dict[Union[type, super, str], TypeObject] = field( default_factory=dict, init=False, repr=False ) - assumed_compatibilities: List[Tuple[TypeObject, TypeObject]] = field( + assumed_compatibilities: list[tuple[TypeObject, TypeObject]] = field( default_factory=list ) - vnv_map: Dict[str, VariableNameValue] = field(default_factory=dict) - type_alias_cache: Dict[object, TypeAlias] = field(default_factory=dict) + vnv_map: dict[str, VariableNameValue] = field(default_factory=dict) + type_alias_cache: dict[object, TypeAlias] = field(default_factory=dict) _should_exclude_any: bool = False _has_used_any_match: bool = False @@ -119,10 +119,10 
@@ def maybe_get_variable_name_value( ) -> Optional[VariableNameValue]: return VariableNameValue.from_varname(varname, self.vnv_map) - def perform_final_checks(self) -> List[Failure]: + def perform_final_checks(self) -> list[Failure]: return self.callable_tracker.check() - def get_additional_bases(self, typ: Union[type, super]) -> Set[type]: + def get_additional_bases(self, typ: Union[type, super]) -> set[type]: bases = set() for provider in self.options.get_value_for(AdditionalBaseProviders): bases |= provider(typ) @@ -183,7 +183,7 @@ def _build_type_object(self, typ: Union[type, super, str]) -> TypeObject: def _get_recursive_typeshed_bases( self, typ: Union[type, str] - ) -> Set[Union[type, str]]: + ) -> set[Union[type, str]]: seen = set() to_do = {typ} result = set() @@ -197,11 +197,11 @@ def _get_recursive_typeshed_bases( seen.add(typ) return result - def _get_typeshed_bases(self, typ: Union[type, str]) -> Set[Union[type, str]]: + def _get_typeshed_bases(self, typ: Union[type, str]) -> set[Union[type, str]]: base_values = self.ts_finder.get_bases_recursively(typ) return {base.typ for base in base_values if isinstance(base, TypedValue)} - def _get_protocol_members(self, bases: Iterable[Union[type, str]]) -> Set[str]: + def _get_protocol_members(self, bases: Iterable[Union[type, str]]) -> set[str]: return set( itertools.chain.from_iterable( self.ts_finder.get_all_attributes(base) for base in bases @@ -442,7 +442,7 @@ def get_attribute_from_value( } -def _extract_protocol_members(typ: type) -> Set[str]: +def _extract_protocol_members(typ: type) -> set[str]: if ( typ is object or is_typing_name(typ, "Generic") @@ -468,7 +468,7 @@ def get_attribute_from_typeshed(self, typ: type, *, on_class: bool) -> Value: def get_attribute_from_typeshed_recursively( self, fq_name: str, *, on_class: bool - ) -> Tuple[Value, object]: + ) -> tuple[Value, object]: return self.checker.ts_finder.get_attribute_recursively( fq_name, self.attr, on_class=on_class ) diff --git 
a/pyanalyze/error_code.py b/pyanalyze/error_code.py index 39f5d520..fe044d21 100644 --- a/pyanalyze/error_code.py +++ b/pyanalyze/error_code.py @@ -6,7 +6,6 @@ from collections.abc import Iterable, Iterator from dataclasses import dataclass -from typing import Dict import pyanalyze @@ -21,7 +20,7 @@ class Error: class ErrorRegistry: - errors: Dict[str, Error] + errors: dict[str, Error] def __init__(self, errors: Iterable[Error]) -> None: self.errors = {} diff --git a/pyanalyze/extensions.py b/pyanalyze/extensions.py index faf06696..d47acb4a 100644 --- a/pyanalyze/extensions.py +++ b/pyanalyze/extensions.py @@ -20,12 +20,8 @@ Annotated, Any, Callable, - Dict, - List, NoReturn, Optional, - Tuple, - Type, TypeVar, Union, ) @@ -171,7 +167,7 @@ def can_assign(self, value: "Value", ctx: "CanAssignContext") -> "CanAssign": class _AsynqCallableMeta(type): def __getitem__( - self, params: Tuple[Union[Literal[Ellipsis], List[object]], object] + self, params: tuple[Union[Literal[Ellipsis], list[object]], object] ) -> Any: if not isinstance(params, tuple) or len(params) != 2: raise TypeError( @@ -201,7 +197,7 @@ class AsynqCallable(metaclass=_AsynqCallableMeta): """ - args: Union[Literal[Ellipsis], Tuple[object, ...]] + args: Union[Literal[Ellipsis], tuple[object, ...]] return_type: object # Returns AsynqCallable but pyanalyze interprets that as AsynqCallable[..., Any] @@ -230,7 +226,7 @@ def replace_type(arg: object) -> object: return AsynqCallable(new_args, new_return_type) @property - def __parameters__(self) -> Tuple["TypeVar", ...]: + def __parameters__(self) -> tuple["TypeVar", ...]: params = [] for arg in self._inner_types: if isinstance(arg, TypeVar): @@ -250,7 +246,7 @@ def __call__(self, *args: Any, **kwargs: Any) -> Any: class _ParameterGuardMeta(type): - def __getitem__(self, params: Tuple[str, object]) -> Any: + def __getitem__(self, params: tuple[str, object]) -> Any: if not isinstance(params, tuple) or len(params) != 2: raise TypeError( f"{self.__name__}[...] 
should be instantiated " @@ -294,7 +290,7 @@ def assert_is_int(arg: object) -> Annotated[bool, NoReturnGuard["arg", int]]: class _HasAttrGuardMeta(type): - def __getitem__(self, params: Tuple[str, str, object]) -> "HasAttrGuard": + def __getitem__(self, params: tuple[str, str, object]) -> "HasAttrGuard": if not isinstance(params, tuple) or len(params) != 3: raise TypeError( "HasAttrGuard[...] should be instantiated " @@ -503,11 +499,11 @@ def decorator(__arg: _T) -> _T: return decorator -_overloads: Dict[str, List[Callable[..., Any]]] = defaultdict(list) -_type_evaluations: Dict[str, List[Callable[..., Any]]] = defaultdict(list) +_overloads: dict[str, list[Callable[..., Any]]] = defaultdict(list) +_type_evaluations: dict[str, list[Callable[..., Any]]] = defaultdict(list) -def get_overloads(fully_qualified_name: str) -> List[Callable[..., Any]]: +def get_overloads(fully_qualified_name: str) -> list[Callable[..., Any]]: """Return all defined runtime overloads for this fully qualified name.""" return _overloads[fully_qualified_name] @@ -644,7 +640,7 @@ class _EnumName: """ # TODO after dropping 3.8: switch to a single class with __class_getitem__ - def __getitem__(self, enum_cls: Type[enum.Enum]) -> Any: + def __getitem__(self, enum_cls: type[enum.Enum]) -> Any: return Annotated[str, pyanalyze.annotated_types.EnumName(enum_cls)] diff --git a/pyanalyze/find_unused.py b/pyanalyze/find_unused.py index 84505654..c4fc845f 100644 --- a/pyanalyze/find_unused.py +++ b/pyanalyze/find_unused.py @@ -12,7 +12,7 @@ from collections.abc import Iterable from dataclasses import dataclass, field from types import ModuleType, TracebackType -from typing import Dict, List, Optional, Set, Type, TypeVar +from typing import Optional, TypeVar import qcore @@ -102,17 +102,17 @@ class UnusedObjectFinder: enabled: bool = False print_output: bool = True print_all: bool = False - usages: Dict[ModuleType, Dict[str, Set[str]]] = field( + usages: dict[ModuleType, dict[str, set[str]]] = field( 
default_factory=lambda: defaultdict(lambda: defaultdict(set)), init=False ) - import_stars: Dict[ModuleType, Set[ModuleType]] = field( + import_stars: dict[ModuleType, set[ModuleType]] = field( default_factory=lambda: defaultdict(set), init=False ) - module_to_import_stars: Dict[ModuleType, Set[ModuleType]] = field( + module_to_import_stars: dict[ModuleType, set[ModuleType]] = field( default_factory=lambda: defaultdict(set), init=False ) - visited_modules: List[ModuleType] = field(default_factory=list) - recursive_stack: Set[ModuleType] = field(default_factory=set) + visited_modules: list[ModuleType] = field(default_factory=list) + recursive_stack: set[ModuleType] = field(default_factory=set) def __post_init__(self) -> None: if self.options is None: @@ -126,7 +126,7 @@ def __enter__(self) -> Optional["UnusedObjectFinder"]: def __exit__( self, - exc_typ: Optional[Type[BaseException]], + exc_typ: Optional[type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: diff --git a/pyanalyze/format_strings.py b/pyanalyze/format_strings.py index afede483..4834123b 100644 --- a/pyanalyze/format_strings.py +++ b/pyanalyze/format_strings.py @@ -11,7 +11,7 @@ from collections.abc import Iterable, Sequence from dataclasses import dataclass, field from re import Match -from typing import Callable, Dict, List, Optional, Tuple, Union, runtime_checkable +from typing import Callable, Optional, Union, runtime_checkable from typing_extensions import Literal, Protocol @@ -295,7 +295,7 @@ def accept(self, args: Value, ctx: CanAssignContext) -> Iterable[str]: else: yield from self.accept_tuple_args(args, ctx) - def get_specifier_mapping(self) -> Dict[str, List[ConversionSpecifier]]: + def get_specifier_mapping(self) -> dict[str, list[ConversionSpecifier]]: """Return a mapping from mapping key to conversion specifiers for that mapping key.""" out = defaultdict(list) for specifier in self.specifiers: @@ -387,7 +387,7 @@ def check_string_format( args: 
Value, on_error: Callable[..., None], ctx: CanAssignContext, -) -> Tuple[Value, Optional[ast.expr]]: +) -> tuple[Value, Optional[ast.expr]]: """Checks that arguments to %-formatted strings are correct.""" if isinstance(format_str, bytes): fs = PercentFormatString.from_bytes_pattern(format_str) @@ -470,8 +470,8 @@ def _is_simple_enough(node: ast.AST) -> bool: # .format() # -FormatErrors = List[Tuple[int, str]] -Children = List[Union[str, "ReplacementField"]] +FormatErrors = list[tuple[int, str]] +Children = list[Union[str, "ReplacementField"]] @dataclass @@ -530,7 +530,7 @@ class IndexOrAttribute(enum.Enum): @dataclass class ReplacementField: arg_name: Union[None, int, str] - index_attribute: Sequence[Tuple[IndexOrAttribute, str]] = () + index_attribute: Sequence[tuple[IndexOrAttribute, str]] = () conversion: Optional[str] = None format_spec: Optional[FormatString] = None @@ -543,7 +543,7 @@ def iter_replacement_fields(self) -> Iterable["ReplacementField"]: yield from child.iter_replacement_fields() -def parse_format_string(string: str) -> Tuple[FormatString, FormatErrors]: +def parse_format_string(string: str) -> tuple[FormatString, FormatErrors]: state = _ParserState(string) children = _parse_children(state, end_at=None) return FormatString(children), state.errors diff --git a/pyanalyze/functions.py b/pyanalyze/functions.py index 990d37b5..9e76343c 100644 --- a/pyanalyze/functions.py +++ b/pyanalyze/functions.py @@ -13,7 +13,7 @@ from collections.abc import Iterable, Sequence from dataclasses import dataclass, replace from itertools import zip_longest -from typing import List, Optional, Tuple, TypeVar, Union +from typing import Optional, TypeVar, Union import asynq from typing_extensions import Protocol @@ -91,7 +91,7 @@ class FunctionInfo: # a list of tuples of (decorator function, applied decorator function, AST node). 
These are # different for decorators that take arguments, like @asynq(): the first element will be the # asynq function and the second will be the result of calling asynq(). - decorators: List[Tuple[Value, Value, ast.AST]] + decorators: list[tuple[Value, Value, ast.AST]] node: FunctionNode params: Sequence[ParamInfo] return_annotation: Optional[Value] @@ -242,7 +242,7 @@ def compute_parameters( *vararg_defaults, *kw_defaults, ] - args: List[Tuple[ParameterKind, ast.arg]] = [ + args: list[tuple[ParameterKind, ast.arg]] = [ (ParameterKind.POSITIONAL_ONLY, arg) for arg in posonly_args ] + [(ParameterKind.POSITIONAL_OR_KEYWORD, arg) for arg in node.args.args] if node.args.vararg is not None: @@ -253,7 +253,7 @@ def compute_parameters( params = [] tv_index = 1 - seen_paramspec_args: Optional[Tuple[ast.arg, ParamSpecArgsValue]] = None + seen_paramspec_args: Optional[tuple[ast.arg, ParamSpecArgsValue]] = None for idx, (param, default) in enumerate(zip_longest(args, defaults)): assert param is not None, "must have more args than defaults" (kind, arg) = param diff --git a/pyanalyze/implementation.py b/pyanalyze/implementation.py index 0d8c1465..5b546822 100644 --- a/pyanalyze/implementation.py +++ b/pyanalyze/implementation.py @@ -6,7 +6,7 @@ import typing from collections.abc import Iterable, Sequence from itertools import product -from typing import Callable, Dict, NewType, Optional, Type, TypeVar, Union, cast +from typing import Callable, NewType, Optional, TypeVar, Union, cast import qcore import typing_extensions @@ -1457,14 +1457,17 @@ def _str_format_impl(ctx: CallContext) -> Value: unused_indices = set(range(len(args))) - used_indices if unused_indices: ctx.show_error( - "Numbered argument(s) %s were not used" - % ", ".join(map(str, sorted(unused_indices))), + "Numbered argument(s) {} were not used".format( + ", ".join(map(str, sorted(unused_indices))) + ), error_code=ErrorCode.incompatible_call, ) unused_kwargs = set(kwargs) - used_kwargs if unused_kwargs: 
ctx.show_error( - "Named argument(s) %s were not used" % ", ".join(sorted(unused_kwargs)), + "Named argument(s) {} were not used".format( + ", ".join(sorted(unused_kwargs)) + ), error_code=ErrorCode.incompatible_call, ) return TypedValue(str) @@ -1557,7 +1560,7 @@ def len_of_value(val: Value) -> Value: return TypedValue(int) -def len_transformer(val: Value, op: Type[ast.AST], comparator: object) -> Value: +def len_transformer(val: Value, op: type[ast.AST], comparator: object) -> Value: if not isinstance(comparator, int): return val if isinstance(len_of_value(val), KnownValue): @@ -1671,7 +1674,7 @@ def _namedtuple_impl(ctx: CallContext) -> Value: V = TypeVar("V") -def get_default_argspecs() -> Dict[object, Signature]: +def get_default_argspecs() -> dict[object, Signature]: signatures = [ # pyanalyze helpers Signature.make( @@ -2270,7 +2273,7 @@ def _re_impl_with_pattern(ctx: CallContext) -> Value: def get_default_argspecs_with_cache( asc: "pyanalyze.arg_spec.ArgSpecCache", -) -> Dict[object, ConcreteSignature]: +) -> dict[object, ConcreteSignature]: sigs = {} for func in ( re.compile, diff --git a/pyanalyze/importer.py b/pyanalyze/importer.py index 2f5eefda..0d0c8aec 100644 --- a/pyanalyze/importer.py +++ b/pyanalyze/importer.py @@ -11,7 +11,7 @@ from functools import lru_cache from pathlib import Path from types import ModuleType -from typing import Optional, Tuple, cast +from typing import Optional, cast @lru_cache @@ -21,7 +21,7 @@ def directory_has_init(path: Path) -> bool: def load_module_from_file( filename: str, *, verbose: bool = False, import_paths: Sequence[str] = () -) -> Tuple[Optional[ModuleType], bool]: +) -> tuple[Optional[ModuleType], bool]: """Import the Python code in the given file. Return a tuple (module object, whether it is a compiled file). 
diff --git a/pyanalyze/name_check_visitor.py b/pyanalyze/name_check_visitor.py index 88f8b6bf..2cf5ec51 100644 --- a/pyanalyze/name_check_visitor.py +++ b/pyanalyze/name_check_visitor.py @@ -37,12 +37,7 @@ Callable, ClassVar, ContextManager, - Dict, - List, Optional, - Set, - Tuple, - Type, TypeVar, Union, ) @@ -344,7 +339,7 @@ def __enter__(self) -> T: def __exit__( self, - __exc_type: Optional[Type[BaseException]], + __exc_type: Optional[type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[types.TracebackType], ) -> U: @@ -357,7 +352,7 @@ async def __aenter__(self) -> T: async def __aexit__( self, - __exc_type: Optional[Type[BaseException]], + __exc_type: Optional[type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[types.TracebackType], ) -> U: @@ -562,7 +557,7 @@ class IgnoredUnusedAttributes(StringSequenceOption): ] -class IgnoredUnusedClassAttributes(ConcatenatedOption[Tuple[type, Set[str]]]): +class IgnoredUnusedClassAttributes(ConcatenatedOption[tuple[type, set[str]]]): """List of pairs of (class, set of attribute names). 
When these attribute names are seen as unused on a child or base class of the class, they are not listed.""" @@ -571,7 +566,7 @@ class IgnoredUnusedClassAttributes(ConcatenatedOption[Tuple[type, Set[str]]]): should_create_command_line_option = False # too complicated @classmethod - def parse(cls, data: object, source_path: Path) -> Sequence[Tuple[type, Set[str]]]: + def parse(cls, data: object, source_path: Path) -> Sequence[tuple[type, set[str]]]: if not isinstance(data, (list, tuple)): raise InvalidConfigOption.from_parser( cls, "sequence of (type, [attribute]) pairs", data @@ -696,7 +691,7 @@ def __enter__(self) -> Optional["ClassAttributeChecker"]: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc_value: Optional[BaseException], exc_traceback: Optional[types.TracebackType], ) -> None: @@ -850,7 +845,7 @@ def check_unused_attributes(self) -> None: """ all_attrs_read = collections.defaultdict(set) - def _add_attrs(typ: Any, attr_names_read: Set[str]) -> None: + def _add_attrs(typ: Any, attr_names_read: set[str]) -> None: if typ is None: return all_attrs_read[typ] |= attr_names_read @@ -879,7 +874,7 @@ def _add_attrs(typ: Any, attr_names_read: Set[str]) -> None: print(f"Unused method: {typ!r}.{attr}") # sort by module + name in order to get errors in a reasonable order - def _cls_sort(self, pair: Tuple[Any, Any]) -> Tuple[str, ...]: + def _cls_sort(self, pair: tuple[Any, Any]) -> tuple[str, ...]: typ = pair[0] if hasattr(typ, "__name__") and isinstance(typ.__name__, str): return (str(typ.__module__), str(typ.__name__)) @@ -1018,7 +1013,7 @@ def _should_reject_unexamined(self, base_cls: type) -> bool: return result -_AstType = Union[Type[ast.AST], Tuple[Type[ast.AST], ...]] +_AstType = Union[type[ast.AST], tuple[type[ast.AST], ...]] class StackedContexts: @@ -1028,7 +1023,7 @@ class StackedContexts: """ - contexts: List[ast.AST] + contexts: list[ast.AST] def __init__(self) -> None: self.contexts = [] @@ 
-1078,13 +1073,13 @@ class NameCheckVisitor(node_visitor.ReplacingNodeVisitor): config_filename: ClassVar[Optional[str]] = None """Path (relative to this class's file) to a pyproject.toml config file.""" - _argspec_to_retval: Dict[int, Tuple[Value, MaybeSignature]] + _argspec_to_retval: dict[int, tuple[Value, MaybeSignature]] _has_used_any_match: bool - _method_cache: Dict[Type[ast.AST], Callable[[Any], Optional[Value]]] - _name_node_to_statement: Optional[Dict[ast.AST, Optional[ast.AST]]] + _method_cache: dict[type[ast.AST], Callable[[Any], Optional[Value]]] + _name_node_to_statement: Optional[dict[ast.AST, Optional[ast.AST]]] _should_exclude_any: bool - _statement_types: Set[Type[ast.AST]] - ann_assign_type: Optional[Tuple[Optional[Value], bool]] + _statement_types: set[type[ast.AST]] + ann_assign_type: Optional[tuple[Optional[Value], bool]] annotate: bool arg_spec_cache: ArgSpecCache async_kind: AsyncFunctionKind @@ -1094,17 +1089,17 @@ class NameCheckVisitor(node_visitor.ReplacingNodeVisitor): checker: Checker collector: Optional[CallSiteCollector] current_class: Optional[type] - current_enum_members: Optional[Dict[object, str]] + current_enum_members: Optional[dict[object, str]] current_function: Optional[object] current_function_info: Optional[FunctionInfo] current_function_name: Optional[str] error_for_implicit_any: bool expected_return_value: Optional[Value] - future_imports: Set[str] + future_imports: set[str] in_annotation: bool in_comprehension_body: bool in_union_decomposition: bool - import_name_to_node: Dict[str, Union[ast.Import, ast.ImportFrom]] + import_name_to_node: dict[str, Union[ast.Import, ast.ImportFrom]] is_async_def: bool is_compiled: bool is_generator: bool @@ -1113,7 +1108,7 @@ class NameCheckVisitor(node_visitor.ReplacingNodeVisitor): node_context: StackedContexts options: Options reexport_tracker: ImplicitReexportTracker - return_values: List[Optional[Value]] + return_values: list[Optional[Value]] scopes: StackedScopes state: 
VisitorState unused_finder: UnusedObjectFinder @@ -1279,7 +1274,7 @@ def __reduce_ex__(self, proto: object) -> object: # Only pickle the attributes needed to get error reporting working return self.__class__, (self.filename, self.contents, self.tree, self.settings) - def _load_module(self) -> Tuple[Optional[types.ModuleType], bool]: + def _load_module(self) -> tuple[Optional[types.ModuleType], bool]: """Sets the module_path and module for this file.""" if not self.filename: return None, False @@ -1330,7 +1325,7 @@ def _load_module(self) -> Tuple[Optional[types.ModuleType], bool]: traceback.print_exc() return None, False - def check(self, ignore_missing_module: bool = False) -> List[node_visitor.Failure]: + def check(self, ignore_missing_module: bool = False) -> list[node_visitor.Failure]: """Run the visitor on this module.""" start_time = qcore.utime() try: @@ -1457,7 +1452,7 @@ def _show_error_if_checking( *, replacement: Optional[node_visitor.Replacement] = None, detail: Optional[str] = None, - extra_metadata: Optional[Dict[str, Any]] = None, + extra_metadata: Optional[dict[str, Any]] = None, ) -> None: """We usually should show errors only in the check_names state to avoid duplicate errors.""" if self._is_checking(): @@ -1478,7 +1473,7 @@ def _set_name_in_scope( *, private: bool = False, lookup_node: object = None, - ) -> Tuple[Value, VarnameOrigin]: + ) -> tuple[Value, VarnameOrigin]: if lookup_node is None: lookup_node = node @@ -1531,7 +1526,7 @@ def _set_name_in_scope( def _get_base_class_attributes( self, varname: str, node: ast.AST - ) -> Iterable[Tuple[Union[type, str], Value]]: + ) -> Iterable[tuple[Union[type, str], Value]]: if self.current_class is None: return for base_class in self.get_generic_bases(self.current_class): @@ -1684,7 +1679,7 @@ def resolve_name( node: ast.Name, error_node: Optional[ast.AST] = None, suppress_errors: bool = False, - ) -> Tuple[Value, VarnameOrigin]: + ) -> tuple[Value, VarnameOrigin]: """Resolves a Name node to a value. 
:param node: Node to resolve the name from @@ -1756,7 +1751,7 @@ def resolve_name( def _get_first_import_node(self) -> ast.stmt: return min(self.import_name_to_node.values(), key=lambda node: node.lineno) - def _generic_visit_list(self, lst: Iterable[ast.AST]) -> List[Value]: + def _generic_visit_list(self, lst: Iterable[ast.AST]) -> list[Value]: return [self.visit(node) for node in lst] def _is_write_ctx(self, ctx: ast.AST) -> bool: @@ -2877,7 +2872,7 @@ def _get_import_from_module(self, node: ast.ImportFrom) -> Value: if self.filename.endswith("/__init__.py"): level -= 1 - current_module_path: List[str] = self.module.__name__.split(".") + current_module_path: list[str] = self.module.__name__.split(".") if level >= len(current_module_path): self._show_error_if_checking( node, @@ -3113,7 +3108,7 @@ def _visit_comprehension_inner( def visit_JoinedStr(self, node: ast.JoinedStr) -> Value: elements = self._generic_visit_list(node.values) limit = self.options.get_value_for(UnionSimplificationLimit) - possible_values: List[List[str]] = [[]] + possible_values: list[list[str]] = [[]] for elt in elements: subvals = list(flatten_values(elt)) # Bail out if the list of possible values gets too long. 
@@ -3243,7 +3238,7 @@ def _name_exists(self, name: str) -> bool: def visit_Dict(self, node: ast.Dict) -> Value: ret = {} - all_pairs: List[KVPair] = [] + all_pairs: list[KVPair] = [] has_non_literal = False for key_node, value_node in zip(node.keys, node.values): value_val = self.visit(value_node) @@ -3941,14 +3936,6 @@ def visit_Slice(self, node: ast.Slice) -> Value: return TypedValue(slice) # These two are unused in 3.9 and higher - if sys.version_info < (3, 9): - - def visit_ExtSlice(self, node: ast.ExtSlice) -> Value: - dims = [self.visit(dim) for dim in node.dims] - return self._maybe_make_sequence(tuple, dims, node) - - def visit_Index(self, node: ast.Index) -> Value: - return self.visit(node.value) # Control flow @@ -4310,7 +4297,7 @@ def visit_While(self, node: ast.While) -> None: self._set_name_in_scope(LEAVES_SCOPE, node, AnyValue(AnySource.marker)) def _handle_loop_else( - self, orelse: List[ast.stmt], body_scope: SubScope, always_entered: bool + self, orelse: list[ast.stmt], body_scope: SubScope, always_entered: bool ) -> None: if always_entered: self.scopes.combine_subscopes([body_scope]) @@ -4383,7 +4370,7 @@ def visit_AsyncWith(self, node: ast.AsyncWith) -> None: def visit_single_cm( self, - items: List[ast.withitem], + items: list[ast.withitem], body: Iterable[ast.AST], *, is_async: bool = False, @@ -4510,7 +4497,7 @@ def visit_ExceptHandler(self, node: ast.ExceptHandler) -> None: def _extract_exception_types( self, typ: Value, node: ast.AST, is_try_star: bool = False - ) -> List[Tuple[bool, Value]]: + ) -> list[tuple[bool, Value]]: possible_types = [] for subval in flatten_values(typ, unwrap_annotated=True): subval = replace_known_sequence_value(subval) @@ -4601,7 +4588,7 @@ def _subscope_and_maybe_supress(self, should_suppress: bool) -> Iterator[SubScop def constraint_from_condition( self, node: ast.AST, check_boolability: bool = True - ) -> Tuple[Value, AbstractConstraint]: + ) -> tuple[Value, AbstractConstraint]: condition = 
self._visit_possible_constraint(node) constraint = extract_constraints(condition) if self._is_collecting(): @@ -4710,8 +4697,7 @@ def visit_Assign(self, node: ast.Assign) -> None: if value.val in self.current_enum_members: self._show_error_if_checking( node, - "Duplicate enum member: %s is used for both %s and %s" - % ( + "Duplicate enum member: {} is used for both {} and {}".format( value.val, self.current_enum_members[value.val], ", ".join(names), @@ -5118,7 +5104,7 @@ def _check_dunder_call_or_catch( method_name: str, args: Iterable[Composite], allow_call: bool = False, - ) -> Union[Value, List[node_visitor.Error]]: + ) -> Union[Value, list[node_visitor.Error]]: """Use this for checking a dunder call that may fall back to another. There are three cases: @@ -5149,7 +5135,7 @@ def _check_dunder_call( method_name: str, args: Iterable[Composite], allow_call: bool = False, - ) -> Tuple[Value, bool]: + ) -> tuple[Value, bool]: val = callee_composite.value if isinstance(val, AnnotatedValue): val = val.value @@ -5185,7 +5171,7 @@ def _check_dunder_call_no_mvv( method_name: str, args: Iterable[Composite], allow_call: bool = False, - ) -> Tuple[Value, bool]: + ) -> tuple[Value, bool]: method_object = self._get_dunder(node, callee_composite.value, method_name) if method_object is UNINITIALIZED_VALUE: return AnyValue(AnySource.error), False @@ -5467,7 +5453,7 @@ def _should_ignore_val(self, node: ast.AST) -> bool: # Call nodes - def visit_keyword(self, node: ast.keyword) -> Tuple[Optional[str], Composite]: + def visit_keyword(self, node: ast.keyword) -> tuple[Optional[str], Composite]: return (node.arg, self.composite_from_node(node.value)) def visit_Call(self, node: ast.Call) -> Value: @@ -5514,11 +5500,11 @@ def visit_Call(self, node: ast.Call) -> Value: return return_value def _can_perform_call( - self, args: Iterable[Value], keywords: Iterable[Tuple[Optional[str], Value]] + self, args: Iterable[Value], keywords: Iterable[tuple[Optional[str], Value]] ) -> Annotated[ 
bool, ParameterTypeGuard["args", Iterable[KnownValue]], - ParameterTypeGuard["keywords", Iterable[Tuple[str, KnownValue]]], + ParameterTypeGuard["keywords", Iterable[tuple[str, KnownValue]]], ]: """Returns whether all of the arguments were inferred successfully.""" return all(isinstance(arg, KnownValue) for arg in args) and all( @@ -5531,7 +5517,7 @@ def check_call( node: Optional[ast.AST], callee: Value, args: Iterable[Composite], - keywords: Iterable[Tuple[Optional[str], Composite]] = (), + keywords: Iterable[tuple[Optional[str], Composite]] = (), *, allow_call: bool = False, ) -> Value: @@ -5568,7 +5554,7 @@ def _check_call_no_mvv( node: Optional[ast.AST], callee_wrapped: Value, args: Iterable[Composite], - keywords: Iterable[Tuple[Optional[str], Composite]] = (), + keywords: Iterable[tuple[Optional[str], Composite]] = (), *, allow_call: bool = False, ) -> Value: @@ -5825,14 +5811,14 @@ def get_description_for_error_code(cls, error_code: Error) -> str: @classmethod def get_default_directories( cls, checker: Checker, **kwargs: Any - ) -> Tuple[str, ...]: + ) -> tuple[str, ...]: paths = checker.options.get_value_for(Paths) return tuple(str(path) for path in paths) @classmethod def _get_default_settings( cls, - ) -> Optional[Dict[node_visitor.ErrorCodeInstance, bool]]: + ) -> Optional[dict[node_visitor.ErrorCodeInstance, bool]]: return {} @classmethod @@ -5877,13 +5863,13 @@ def is_enabled(self, error_code: node_visitor.ErrorCodeInstance) -> bool: @classmethod def perform_final_checks( cls, kwargs: Mapping[str, Any] - ) -> List[node_visitor.Failure]: + ) -> list[node_visitor.Failure]: return kwargs["checker"].perform_final_checks() @classmethod def _run_on_files( cls, - files: List[str], + files: list[str], *, checker: Checker, find_unused: bool = False, @@ -5891,7 +5877,7 @@ def _run_on_files( attribute_checker: Optional[ClassAttributeChecker] = None, unused_finder: Optional[UnusedObjectFinder] = None, **kwargs: Any, - ) -> List[node_visitor.Failure]: + ) -> 
list[node_visitor.Failure]: attribute_checker_enabled = checker.options.is_error_code_enabled_anywhere( ErrorCode.attribute_is_never_set ) @@ -5948,7 +5934,7 @@ def check_file_in_worker( filename: str, attribute_checker: Optional[ClassAttributeChecker] = None, **kwargs: Any, - ) -> Tuple[List[node_visitor.Failure], Any]: + ) -> tuple[list[node_visitor.Failure], Any]: failures = cls.check_file( filename, attribute_checker=attribute_checker, **kwargs ) @@ -6013,7 +5999,7 @@ def build_stacked_scopes( return StackedScopes(module_vars, module, simplification_limit=simplification_limit) -def _get_task_cls(fn: object) -> "Type[asynq.FutureBase[Any]]": +def _get_task_cls(fn: object) -> "type[asynq.FutureBase[Any]]": """Returns the task class for an async function.""" if hasattr(fn, "task_cls"): diff --git a/pyanalyze/node_visitor.py b/pyanalyze/node_visitor.py index 96f24783..ee5a49d7 100644 --- a/pyanalyze/node_visitor.py +++ b/pyanalyze/node_visitor.py @@ -25,7 +25,7 @@ from enum import Enum from pathlib import Path from types import ModuleType -from typing import Any, Dict, List, Optional, Tuple, Type, Union +from typing import Any, Optional, Union import codemod import qcore @@ -35,8 +35,8 @@ from . 
import analysis_lib, error_code from .safe import safe_getattr, safe_isinstance -Error = Dict[str, Any] -ErrorCodeContainer = Union[error_code.ErrorRegistry, Type[Enum]] +Error = dict[str, Any] +ErrorCodeContainer = Union[error_code.ErrorRegistry, type[Enum]] ErrorCodeInstance = Union[error_code.Error, Enum] @@ -66,7 +66,7 @@ def __init__( self, start_line_number: int, end_line_number: Optional[int] = None, - new_lines: Optional[List[str]] = None, + new_lines: Optional[list[str]] = None, path: Optional[str] = None, description: Optional[str] = None, ) -> None: @@ -84,9 +84,9 @@ def render_range(self) -> str: class _Query: """Simple equivalent of codemod.Query.""" - patches: List[_PatchWithDescription] + patches: list[_PatchWithDescription] - def generate_patches(self) -> List[_PatchWithDescription]: + def generate_patches(self) -> list[_PatchWithDescription]: return self.patches @@ -135,11 +135,11 @@ class Failure(TypedDict): col_offset: NotRequired[int] context: NotRequired[str] message: NotRequired[str] - extra_metadata: NotRequired[Dict[str, Any]] + extra_metadata: NotRequired[dict[str, Any]] class ErrorContext(Protocol): - all_failures: List[Failure] + all_failures: list[Failure] def show_error( self, @@ -149,7 +149,7 @@ def show_error( *, detail: Optional[str] = None, save: bool = True, - extra_metadata: Optional[Dict[str, Any]] = None, + extra_metadata: Optional[dict[str, Any]] = None, ) -> Optional[Failure]: raise NotImplementedError @@ -163,12 +163,12 @@ class BaseNodeVisitor(ast.NodeVisitor): default_module: Optional[ModuleType] = None # module to run on by default # whether to look at FILE_ENVIRON_KEY to find files to run on should_check_environ_for_files: bool = True - caught_errors: Optional[List[Dict[str, Any]]] = None + caught_errors: Optional[list[dict[str, Any]]] = None - _changes_for_fixer: Dict[str, List[Replacement]] = collections.defaultdict(list) + _changes_for_fixer: dict[str, list[Replacement]] = collections.defaultdict(list) tree: ast.Module 
- all_failures: List[Failure] + all_failures: list[Failure] is_code_only: bool def __init__( @@ -209,7 +209,7 @@ def __init__( self.caught_errors = None self.is_code_only = is_code_only - def check(self) -> List[Failure]: + def check(self) -> list[Failure]: """Runs the class's checks on a tree.""" self.log(logging.INFO, "Check file", self.filename) self.visit(self.tree) @@ -233,7 +233,7 @@ def log(self, level: int, label: str, value: object) -> None: self.logger.log(level, f"{qcore.safe_str(label)}: {qcore.safe_str(value)}") @qcore.caching.cached_per_instance() - def _lines(self) -> List[str]: + def _lines(self) -> list[str]: return [line + "\n" for line in self.contents.splitlines()] @qcore.caching.cached_per_instance() @@ -256,7 +256,7 @@ def has_file_level_ignore( return True return False - def get_unused_ignores(self) -> List[Tuple[int, str]]: + def get_unused_ignores(self) -> list[tuple[int, str]]: """Returns line numbers and lines that have unused ignore comments.""" return [ (i, line) @@ -298,7 +298,7 @@ def check_file( assert_passes: bool = True, include_tests: bool = False, **kwargs: Any, - ) -> List[Failure]: + ) -> list[Failure]: """Run checks on a single file. include_tests and assert_passes are arguments here for compatibility with check_all_files. 
@@ -317,7 +317,7 @@ def check_file( @classmethod def check_all_files( cls, include_tests: bool = False, assert_passes: bool = True, **kwargs: Any - ) -> List[Failure]: + ) -> list[Failure]: """Runs the check for all files in scope or changed files if we are test-local.""" if "settings" not in kwargs: kwargs["settings"] = cls._get_default_settings() @@ -331,7 +331,7 @@ def check_all_files( return all_failures @classmethod - def get_files_to_check(cls, include_tests: bool, **kwargs: Any) -> List[str]: + def get_files_to_check(cls, include_tests: bool, **kwargs: Any) -> list[str]: """Produce the list of files to check.""" if cls.should_check_environ_for_files: environ_files = get_files_to_check_from_environ() @@ -354,7 +354,7 @@ def prepare_constructor_kwargs(cls, kwargs: Mapping[str, Any]) -> Mapping[str, A return kwargs @classmethod - def perform_final_checks(cls, kwargs: Mapping[str, Any]) -> List[Failure]: + def perform_final_checks(cls, kwargs: Mapping[str, Any]) -> list[Failure]: return [] @classmethod @@ -425,13 +425,13 @@ def main(cls) -> int: return 1 if failures else 0 @classmethod - def _write_json_report(cls, output_file: str, failures: List[Failure]) -> None: + def _write_json_report(cls, output_file: str, failures: list[Failure]) -> None: failures = [_make_serializable(failure) for failure in failures] with open(output_file, "w") as f: json.dump(failures, f) @classmethod - def _write_markdown_report(cls, output_file: str, failures: List[Failure]) -> None: + def _write_markdown_report(cls, output_file: str, failures: list[Failure]) -> None: by_file = collections.defaultdict(list) for failure in failures: by_file[failure["filename"]].append(failure) @@ -503,7 +503,7 @@ def _run_and_apply_changes( return had_failure @classmethod - def _apply_changes(cls, changes: Dict[str, List[Replacement]]) -> None: + def _apply_changes(cls, changes: dict[str, list[Replacement]]) -> None: for filename, changeset in changes.items(): with open(filename) as f: lines = 
f.readlines() @@ -513,7 +513,7 @@ def _apply_changes(cls, changes: Dict[str, List[Replacement]]) -> None: @classmethod def _apply_changes_to_lines( - cls, changes: List[Replacement], input_lines: Sequence[str] + cls, changes: list[Replacement], input_lines: Sequence[str] ) -> Sequence[str]: # only apply the first change because that change might affect other fixes # that test_scope came up for that file. So we break after finding first applicable fix. @@ -532,7 +532,7 @@ def _apply_changes_to_lines( return lines @classmethod - def _get_default_settings(cls) -> Optional[Dict[ErrorCodeInstance, bool]]: + def _get_default_settings(cls) -> Optional[dict[ErrorCodeInstance, bool]]: if cls.error_code_enum is None: return None else: @@ -541,7 +541,7 @@ def _get_default_settings(cls) -> Optional[Dict[ErrorCodeInstance, bool]]: } @contextmanager - def catch_errors(self) -> Iterator[List[Error]]: + def catch_errors(self) -> Iterator[list[Error]]: caught_errors = [] with qcore.override(self, "caught_errors", caught_errors): yield caught_errors @@ -566,7 +566,7 @@ def show_error( ignore_comment: str = IGNORE_COMMENT, detail: Optional[str] = None, save: bool = True, - extra_metadata: Optional[Dict[str, Any]] = None, + extra_metadata: Optional[dict[str, Any]] = None, ) -> Optional[Failure]: """Shows an error associated with this node. 
@@ -713,13 +713,13 @@ def show_error( raise VisitorError(message, error_code) return error - def _get_attribute_path(self, node: ast.AST) -> Optional[List[str]]: + def _get_attribute_path(self, node: ast.AST) -> Optional[list[str]]: return analysis_lib.get_attribute_path(node) @classmethod def _run( cls, profile: bool = False, num_iterations: int = 1, **kwargs: Any - ) -> Optional[List[Failure]]: + ) -> Optional[list[Failure]]: result = None for _ in range(num_iterations): if profile: @@ -735,7 +735,7 @@ def _run_on_files_or_all( files: Optional[Sequence[str]] = None, code: Optional[str] = None, **kwargs: Any, - ) -> List[Failure]: + ) -> list[Failure]: if code is not None: return cls._run_on_code(code, **kwargs) files = files or cls.get_default_directories(**kwargs) @@ -745,7 +745,7 @@ def _run_on_files_or_all( return cls._run_on_files(_get_all_files(files), **kwargs) @classmethod - def _run_on_code(cls, code: str, **kwargs: Any) -> List[Failure]: + def _run_on_code(cls, code: str, **kwargs: Any) -> list[Failure]: try: tree = ast.parse(code) except Exception as e: @@ -758,7 +758,7 @@ def _run_on_code(cls, code: str, **kwargs: Any) -> List[Failure]: return cls("", code, tree, is_code_only=True, **kwargs).check() @classmethod - def _run_on_files(cls, files: Iterable[str], **kwargs: Any) -> List[Failure]: + def _run_on_files(cls, files: Iterable[str], **kwargs: Any) -> list[Failure]: all_failures = [] args = [(filename, kwargs) for filename in sorted(files)] if kwargs.pop("parallel", False): @@ -776,8 +776,8 @@ def _run_on_files(cls, files: Iterable[str], **kwargs: Any) -> List[Failure]: @classmethod def _check_file_single_arg( - cls, args: Tuple[str, Dict[str, Any]] - ) -> Tuple[List[Failure], Any]: + cls, args: tuple[str, dict[str, Any]] + ) -> tuple[list[Failure], Any]: filename, kwargs = args main_module = sys.modules["__main__"] try: @@ -790,7 +790,7 @@ def _check_file_single_arg( @classmethod def check_file_in_worker( cls, filename: str, **kwargs: Any - ) -> 
Tuple[List[Failure], Any]: + ) -> tuple[list[Failure], Any]: """Checks a single file in a parallel worker. Returns a tuple of (failures, extra data). The extra data will be passed to @@ -821,12 +821,7 @@ def _get_argument_parser(cls) -> argparse.ArgumentParser: for code in cls.error_code_enum: enabled_string = "on" if cls.is_enabled_by_default(code) else "off" code_descriptions.append( - " - %s: %s (default: %s)" - % ( - code.name, - cls.get_description_for_error_code(code), - enabled_string, - ) + f" - {code.name}: {cls.get_description_for_error_code(code)} (default: {enabled_string})" ) epilog = "Supported checks:\n" + "\n".join(code_descriptions) @@ -1126,7 +1121,7 @@ def visit(self, node: ast.AST) -> Any: def get_files_to_check_from_environ( environ_key: str = FILE_ENVIRON_KEY, -) -> Optional[List[str]]: +) -> Optional[list[str]]: """Returns any files to run on specified in the FILE_ENVIRON_KEY that we should run on. If the key isn't in the environ, return None. @@ -1171,7 +1166,7 @@ def __exit__(self, typ: object, value: object, traceback: object) -> None: print(f"profiler output saved as {self.filename}") -def _make_serializable(failure: Failure) -> Dict[str, Any]: +def _make_serializable(failure: Failure) -> dict[str, Any]: result = dict(failure) if "code" in failure: result["code"] = failure["code"].name diff --git a/pyanalyze/options.py b/pyanalyze/options.py index 0fd27a4a..d0b7c18a 100644 --- a/pyanalyze/options.py +++ b/pyanalyze/options.py @@ -12,18 +12,7 @@ from collections.abc import Collection, Iterable, Mapping, Sequence from dataclasses import dataclass from pathlib import Path -from typing import ( - Any, - ClassVar, - Dict, - FrozenSet, - Generic, - Optional, - Set, - Tuple, - Type, - TypeVar, -) +from typing import Any, ClassVar, Generic, Optional, TypeVar import qcore import tomli @@ -64,7 +53,7 @@ def format_usage(self) -> str: T = TypeVar("T") -ModulePath = Tuple[str, ...] +ModulePath = tuple[str, ...] 
class InvalidConfigOption(Exception): @@ -72,7 +61,7 @@ class InvalidConfigOption(Exception): @classmethod def from_parser( - cls, option_cls: Type["ConfigOption"], expected: str, value: object + cls, option_cls: type["ConfigOption"], expected: str, value: object ) -> "InvalidConfigOption": return cls( f"Invalid value for option {option_cls.name}: expected {expected} but got" @@ -86,7 +75,7 @@ class NotFound(Exception): @dataclass class ConfigOption(Generic[T]): - registry: ClassVar[Dict[str, Type["ConfigOption"]]] = {} + registry: ClassVar[dict[str, type["ConfigOption"]]] = {} name: ClassVar[str] is_global: ClassVar[bool] = False @@ -106,12 +95,12 @@ def __init_subclass__(cls) -> None: raise ValueError(f"{cls} is missing a default value") @classmethod - def parse(cls: "Type[ConfigOption[T]]", data: object, source_path: Path) -> T: + def parse(cls: "type[ConfigOption[T]]", data: object, source_path: Path) -> T: raise NotImplementedError @classmethod def get_value_from_instances( - cls: "Type[ConfigOption[T]]", + cls: "type[ConfigOption[T]]", instances: Sequence["ConfigOption[T]"], module_path: ModulePath, ) -> T: @@ -123,7 +112,7 @@ def get_value_from_instances( def is_applicable_to(self, module_path: ModulePath) -> bool: return module_path[: len(self.applicable_to)] == self.applicable_to - def sort_key(self) -> Tuple[object, ...]: + def sort_key(self) -> tuple[object, ...]: """We sort with the most specific option first.""" return ( not self.from_command_line, # command line options first @@ -140,7 +129,7 @@ class BooleanOption(ConfigOption[bool]): default_value = False @classmethod - def parse(cls: "Type[BooleanOption]", data: object, source_path: Path) -> bool: + def parse(cls: "type[BooleanOption]", data: object, source_path: Path) -> bool: if isinstance(data, bool): return data raise InvalidConfigOption.from_parser(cls, "bool", data) @@ -157,7 +146,7 @@ def create_command_line_option(cls, parser: argparse.ArgumentParser) -> None: class 
IntegerOption(ConfigOption[int]): @classmethod - def parse(cls: "Type[IntegerOption]", data: object, source_path: Path) -> int: + def parse(cls: "type[IntegerOption]", data: object, source_path: Path) -> int: if isinstance(data, int): return data raise InvalidConfigOption.from_parser(cls, "int", data) @@ -177,7 +166,7 @@ class ConcatenatedOption(ConfigOption[Sequence[T]]): @classmethod def get_value_from_instances( - cls: "Type[ConcatenatedOption[T]]", + cls: "type[ConcatenatedOption[T]]", instances: Sequence["ConcatenatedOption[T]"], module_path: ModulePath, ) -> Sequence[T]: @@ -194,7 +183,7 @@ class StringSequenceOption(ConcatenatedOption[str]): @classmethod def parse( - cls: "Type[StringSequenceOption]", data: object, source_path: Path + cls: "type[StringSequenceOption]", data: object, source_path: Path ) -> Sequence[str]: if isinstance(data, (list, tuple)) and all( isinstance(elt, str) for elt in data @@ -217,7 +206,7 @@ class PathSequenceOption(ConfigOption[Sequence[Path]]): @classmethod def parse( - cls: "Type[PathSequenceOption]", data: object, source_path: Path + cls: "type[PathSequenceOption]", data: object, source_path: Path ) -> Sequence[str]: if isinstance(data, (list, tuple)) and all( isinstance(elt, str) for elt in data @@ -243,7 +232,7 @@ class PyObjectSequenceOption(ConcatenatedOption[T]): @classmethod def parse( - cls: "Type[PyObjectSequenceOption[T]]", data: object, source_path: Path + cls: "type[PyObjectSequenceOption[T]]", data: object, source_path: Path ) -> Sequence[T]: if not isinstance(data, (list, tuple)): raise InvalidConfigOption.from_parser( @@ -300,13 +289,13 @@ def from_option_list( def for_module(self, module_path: ModulePath) -> "Options": return Options(self.options, module_path) - def get_value_for(self, option: Type[ConfigOption[T]]) -> T: + def get_value_for(self, option: type[ConfigOption[T]]) -> T: try: return self._get_value_for_no_default(option) except NotFound: return option.default_value - def 
_get_value_for_no_default(self, option: Type[ConfigOption[T]]) -> T: + def _get_value_for_no_default(self, option: type[ConfigOption[T]]) -> T: instances = [*self.options.get(option.name, ()), option(option.default_value)] return option.get_value_from_instances(instances, self.module_path) @@ -372,7 +361,7 @@ def parse_config_file( @functools.lru_cache -def get_all_error_codes() -> FrozenSet[str]: +def get_all_error_codes() -> frozenset[str]: return frozenset({error_code.name for error_code in ErrorCode}) @@ -390,7 +379,7 @@ def _parse_config_section( "Top-level configuration should not set module option" ) - enabled_error_codes: Set[str] = set() + enabled_error_codes: set[str] = set() all_error_codes = get_all_error_codes() disable_all_default_error_codes = False diff --git a/pyanalyze/patma.py b/pyanalyze/patma.py index 20662a9e..db448e14 100644 --- a/pyanalyze/patma.py +++ b/pyanalyze/patma.py @@ -10,7 +10,7 @@ import itertools from collections.abc import Container, Sequence from dataclasses import dataclass, replace -from typing import Any, Callable, Optional, Set, Tuple, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union import qcore @@ -264,8 +264,8 @@ def visit_MatchMapping(self, node: MatchMapping) -> AbstractConstraint: ) ] kv_pairs = list(reversed(kv_pairs)) - optional_pairs: Set[KVPair] = set() - removed_pairs: Set[KVPair] = set() + optional_pairs: set[KVPair] = set() + removed_pairs: set[KVPair] = set() for key, pattern in zip(node.keys, node.patterns): key_val = self.visitor.visit(key) value, new_optional_pairs, new_removed_pairs = get_value_from_kv_pairs( @@ -440,11 +440,11 @@ def get_value_from_kv_pairs( ctx: CanAssignContext, optional_pairs: Container[KVPair], removed_pairs: Container[KVPair], -) -> Tuple[Value, Set[KVPair], Set[KVPair]]: +) -> tuple[Value, set[KVPair], set[KVPair]]: """Return the :class:`Value` for a specific key.""" possible_values = [] - covered_keys: Set[Value] = set() - new_optional_pairs: Set[KVPair] = 
set() + covered_keys: set[Value] = set() + new_optional_pairs: set[KVPair] = set() for pair in kv_pairs: if pair in removed_pairs: continue diff --git a/pyanalyze/reexport.py b/pyanalyze/reexport.py index 56b31c08..b6ce8e87 100644 --- a/pyanalyze/reexport.py +++ b/pyanalyze/reexport.py @@ -7,7 +7,7 @@ from ast import AST from collections import defaultdict from dataclasses import InitVar, dataclass, field -from typing import Callable, Dict, List, Set, Tuple +from typing import Callable from .error_code import ErrorCode from .node_visitor import ErrorContext @@ -27,11 +27,11 @@ class ReexportConfig(PyObjectSequenceOption[_ReexportConfigProvider]): @dataclass class ImplicitReexportTracker: options: InitVar[Options] - completed_modules: Set[str] = field(default_factory=set) - module_to_reexports: Dict[str, Set[str]] = field( + completed_modules: set[str] = field(default_factory=set) + module_to_reexports: dict[str, set[str]] = field( default_factory=lambda: defaultdict(set) ) - used_reexports: Dict[str, List[Tuple[str, AST, ErrorContext]]] = field( + used_reexports: dict[str, list[tuple[str, AST, ErrorContext]]] = field( default_factory=lambda: defaultdict(list) ) diff --git a/pyanalyze/safe.py b/pyanalyze/safe.py index 3a5911ea..9c988f68 100644 --- a/pyanalyze/safe.py +++ b/pyanalyze/safe.py @@ -8,7 +8,7 @@ import sys import typing from collections.abc import Container, Sequence -from typing import Any, Dict, NewType, Optional, Tuple, Type, TypeVar, Union +from typing import Any, NewType, Optional, TypeVar, Union import typing_extensions @@ -56,7 +56,7 @@ def safe_equals(left: object, right: object) -> bool: return False -def safe_issubclass(cls: type, class_or_tuple: Union[type, Tuple[type, ...]]) -> bool: +def safe_issubclass(cls: type, class_or_tuple: Union[type, tuple[type, ...]]) -> bool: """Safe version of ``issubclass()``. 
Apart from incorrect arguments, ``issubclass(a, b)`` can throw an error @@ -73,7 +73,7 @@ def safe_issubclass(cls: type, class_or_tuple: Union[type, Tuple[type, ...]]) -> return False -def safe_isinstance(obj: object, class_or_tuple: Union[type, Tuple[type, ...]]) -> bool: +def safe_isinstance(obj: object, class_or_tuple: Union[type, tuple[type, ...]]) -> bool: """Safe version of ``isinstance()``. ``isinstance(a, b)`` can throw an error in the following circumstances: @@ -115,7 +115,7 @@ def is_hashable(obj: object) -> bool: def all_of_type( - elts: Sequence[object], typ: Type[T] + elts: Sequence[object], typ: type[T] ) -> typing_extensions.TypeGuard[Sequence[T]]: """Returns whether all elements of elts are instances of typ.""" return all(isinstance(elt, typ) for elt in elts) @@ -159,10 +159,10 @@ def is_instance_of_typing_name(obj: object, name: str) -> bool: return isinstance(obj, objs) -_typing_name_cache: Dict[str, Tuple[Tuple[Any, ...], Tuple[str, ...]]] = {} +_typing_name_cache: dict[str, tuple[tuple[Any, ...], tuple[str, ...]]] = {} -def _fill_typing_name_cache(name: str) -> Tuple[Tuple[Any, ...], Tuple[str, ...]]: +def _fill_typing_name_cache(name: str) -> tuple[tuple[Any, ...], tuple[str, ...]]: try: return _typing_name_cache[name] except KeyError: diff --git a/pyanalyze/signature.py b/pyanalyze/signature.py index 46dc270e..2fb7dc23 100644 --- a/pyanalyze/signature.py +++ b/pyanalyze/signature.py @@ -19,12 +19,8 @@ Any, Callable, ClassVar, - Dict, - List, NamedTuple, Optional, - Set, - Tuple, TypeVar, Union, ) @@ -152,7 +148,7 @@ class PosOrKeyword: # None for positional args, str for keyword args, # ARGS for *args, KWARGS for **kwargs, PossibleArg for args that may # be missing, TypeVarValue for a ParamSpec. 
-Argument = Tuple[ +Argument = tuple[ Composite, Union[ None, @@ -165,7 +161,7 @@ class PosOrKeyword: ] # Arguments bound to a call -BoundArgs = Dict[str, Tuple[Position, Composite]] +BoundArgs = dict[str, tuple[Position, Composite]] class CheckCallContext(Protocol): @@ -193,7 +189,7 @@ def can_assign_ctx(self) -> CanAssignContext: class _CanAssignBasedContext: can_assign_ctx: CanAssignContext visitor: Optional["NameCheckVisitor"] = None - errors: List[str] = field(default_factory=list) + errors: list[str] = field(default_factory=list) def on_error( self, @@ -248,9 +244,9 @@ class ActualArguments: """ - positionals: List[Tuple[bool, Composite]] + positionals: list[tuple[bool, Composite]] star_args: Optional[Value] # represents the type of the elements of *args - keywords: Dict[str, Tuple[bool, Composite]] + keywords: dict[str, tuple[bool, Composite]] star_kwargs: Optional[Value] # represents the type of the elements of **kwargs kwargs_required: bool pos_or_keyword_params: Container[Union[int, str]] @@ -309,12 +305,12 @@ def unite_impl_rets(cls, rets: Sequence["ImplReturn"]) -> "ImplReturn": class CallContext: """The context passed to an :term:`impl` function.""" - vars: Dict[str, Value] + vars: dict[str, Value] """Dictionary of variable names passed to the function.""" visitor: "NameCheckVisitor" """Using the visitor can allow various kinds of advanced logic in impl functions.""" - composites: Dict[str, Composite] + composites: dict[str, Composite] node: Optional[ast.AST] """AST node corresponding to the function call. 
Useful for showing errors.""" @@ -537,7 +533,7 @@ class Signature: _return_key: ClassVar[str] = "%return" - parameters: Dict[str, SigParameter] + parameters: dict[str, SigParameter] """An ordered mapping of the signature's parameters.""" return_value: Value """What the callable returns.""" @@ -554,10 +550,10 @@ class Signature: """Type evaluator for this function.""" deprecated: Optional[str] = None """Deprecation message for this callable.""" - typevars_of_params: Dict[str, List[TypeVarLike]] = field( + typevars_of_params: dict[str, list[TypeVarLike]] = field( init=False, default_factory=dict, repr=False, compare=False, hash=False ) - all_typevars: Set[TypeVarLike] = field( + all_typevars: set[TypeVarLike] = field( init=False, default_factory=set, repr=False, compare=False, hash=False ) @@ -637,7 +633,7 @@ def _check_param_type_compatibility( ctx: CheckCallContext, typevar_map: Optional[TypeVarMap] = None, is_overload: bool = False, - ) -> Tuple[Optional[BoundsMap], bool, Optional[Value]]: + ) -> tuple[Optional[BoundsMap], bool, Optional[Value]]: """Check type compatibility for a single parameter. Returns a three-tuple: @@ -693,7 +689,7 @@ def _get_positional_parameter(self, index: int) -> Optional[SigParameter]: def _apply_annotated_constraints( self, raw_return: Union[Value, ImplReturn], - composites: Dict[str, Composite], + composites: dict[str, Composite], ctx: CheckCallContext, ) -> Value: if isinstance(raw_return, Value): @@ -785,7 +781,7 @@ def _apply_annotated_constraints( return annotate_value(return_value, extensions) def _get_typeguard_varname( - self, composites: Dict[str, Composite] + self, composites: dict[str, Composite] ) -> Optional[VarnameWithOrigin]: # This might miss some cases where we should use the second argument instead. We'll # have to come up with additional heuristics if that comes up. 
@@ -815,7 +811,7 @@ def bind_arguments( """ positional_index = 0 - keywords_consumed: Set[str] = set() + keywords_consumed: set[str] = set() bound_args: BoundArgs = {} star_args_consumed = False star_kwargs_consumed = False @@ -2007,7 +2003,7 @@ def preprocess_args( """Preprocess the argument list. Produces an ActualArguments object.""" # Step 1: Split up args and kwargs if possible. - processed_args: List[Argument] = [] + processed_args: list[Argument] = [] kwargs_requireds = [] param_spec = None param_spec_star_arg = None @@ -2112,8 +2108,8 @@ def preprocess_args( # any single arguments that come after *args into *args, and we merge all *args. # But for keywords, we first get all the arguments with known keys, and after that unite # all the **kwargs into a single argument. - more_processed_args: List[Tuple[bool, Composite]] = [] - more_processed_kwargs: Dict[str, Tuple[bool, Composite]] = {} + more_processed_args: list[tuple[bool, Composite]] = [] + more_processed_kwargs: dict[str, tuple[bool, Composite]] = {} star_args: Optional[Value] = None star_kwargs: Optional[Value] = None is_ellipsis: bool = False @@ -2191,7 +2187,7 @@ def preprocess_args( def _preprocess_kwargs_no_mvv( value: Value, ctx: CheckCallContext -) -> Optional[Tuple[Dict[str, Tuple[bool, Value]], Optional[Value]]]: +) -> Optional[tuple[dict[str, tuple[bool, Value]], Optional[Value]]]: """Preprocess a Value passed as **kwargs. 
Two possible return types: @@ -2226,10 +2222,10 @@ def _preprocess_kwargs_no_mvv( def _preprocess_kwargs_kv_pairs( items: Sequence[KVPair], ctx: CheckCallContext -) -> Optional[Tuple[Dict[str, Tuple[bool, Value]], Optional[Value]]]: +) -> Optional[tuple[dict[str, tuple[bool, Value]], Optional[Value]]]: out_items = {} possible_values = [] - covered_keys: Set[Value] = set() + covered_keys: set[Value] = set() for pair in reversed(items): if not pair.is_many: if isinstance(pair.key, AnnotatedValue): @@ -2285,7 +2281,7 @@ def _preprocess_kwargs_kv_pairs( class OverloadedSignature: """Represent an overloaded function.""" - signatures: Tuple[Signature, ...] + signatures: tuple[Signature, ...] def __init__(self, sigs: Sequence[Signature]) -> None: object.__setattr__(self, "signatures", tuple(sigs)) @@ -2371,9 +2367,9 @@ def check_call( return AnyValue(AnySource.error) errors_per_overload = [] - any_rets: List[CallReturn] = [] - union_rets: List[CallReturn] = [] - union_and_any_rets: List[CallReturn] = [] + any_rets: list[CallReturn] = [] + union_rets: list[CallReturn] = [] + union_and_any_rets: list[CallReturn] = [] sigs = [ sig for sig, bound_args in zip(self.signatures, bound_args_per_overload) @@ -2481,7 +2477,7 @@ def _unite_rets( def _make_detail( self, - errors_per_overload: Sequence[Sequence[Dict[str, Any]]], + errors_per_overload: Sequence[Sequence[dict[str, Any]]], sigs: Sequence[Signature], ) -> CanAssignError: details = [] @@ -2668,7 +2664,7 @@ def make_bound_method( def can_assign_var_positional( my_param: SigParameter, args_annotation: Value, idx: int, ctx: CanAssignContext -) -> Union[List[BoundsMap], CanAssignError]: +) -> Union[list[BoundsMap], CanAssignError]: my_annotation = my_param.get_annotation() if isinstance(args_annotation, SequenceValue): members = args_annotation.get_member_sequence() @@ -2706,7 +2702,7 @@ def can_assign_var_positional( def can_assign_var_keyword( my_param: SigParameter, kwargs_annotation: Value, ctx: CanAssignContext -) -> 
Union[List[BoundsMap], CanAssignError]: +) -> Union[list[BoundsMap], CanAssignError]: my_annotation = my_param.get_annotation() bounds_maps = [] if isinstance(kwargs_annotation, TypedDictValue): @@ -2751,7 +2747,7 @@ def can_assign_var_keyword( def decompose_union( expected_type: Value, parent_value: Value, ctx: CanAssignContext -) -> Optional[Tuple[BoundsMap, bool, Value]]: +) -> Optional[tuple[BoundsMap, bool, Value]]: value = unannotate(parent_value) if isinstance(value, MultiValuedValue): bounds_maps = [] diff --git a/pyanalyze/stacked_scopes.py b/pyanalyze/stacked_scopes.py index adc46f70..bcdd99eb 100644 --- a/pyanalyze/stacked_scopes.py +++ b/pyanalyze/stacked_scopes.py @@ -29,21 +29,7 @@ from dataclasses import dataclass, field, replace from itertools import chain from types import ModuleType -from typing import ( - Any, - Callable, - ContextManager, - Dict, - FrozenSet, - List, - NamedTuple, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) +from typing import Any, Callable, ContextManager, NamedTuple, Optional, TypeVar, Union import qcore @@ -96,7 +82,7 @@ class ScopeType(enum.Enum): # Nodes as used in scopes can be any object, as long as they are hashable. Node = object # Tag for a Varname that changes when the variable is assigned to. 
-VarnameOrigin = FrozenSet[Optional[Node]] +VarnameOrigin = frozenset[Optional[Node]] CompositeIndex = Union[str, KnownValue] EMPTY_ORIGIN = frozenset((None,)) @@ -137,7 +123,7 @@ def __str__(self) -> str: class VarnameWithOrigin: varname: str origin: VarnameOrigin = EMPTY_ORIGIN - indices: Sequence[Tuple[CompositeIndex, VarnameOrigin]] = () + indices: Sequence[tuple[CompositeIndex, VarnameOrigin]] = () def extend_with( self, index: CompositeIndex, origin: VarnameOrigin @@ -146,7 +132,7 @@ def extend_with( self.varname, self.origin, (*self.indices, (index, origin)) ) - def get_all_varnames(self) -> Iterable[Tuple[Varname, VarnameOrigin]]: + def get_all_varnames(self) -> Iterable[tuple[Varname, VarnameOrigin]]: yield self.varname, self.origin for i, (_, origin) in enumerate(self.indices): varname = CompositeVariable( @@ -171,7 +157,7 @@ def __str__(self) -> str: return "".join(pieces) -SubScope = Dict[Varname, List[Node]] +SubScope = dict[Varname, list[Node]] # Type for Constraint.value if constraint type is predicate # PredicateFunc = Callable[[Value, bool], Optional[Value]] @@ -515,7 +501,7 @@ def two_lengths(tpl: Union[Tuple[int], Tuple[str, int]]) -> int: varname: VarnameWithOrigin provider: Callable[[Value], Value] - value_transformer: Optional[Callable[[Value, Type[AST], object], Value]] = None + value_transformer: Optional[Callable[[Value, type[AST], object], Value]] = None def apply(self) -> Iterable[Constraint]: return [] @@ -529,7 +515,7 @@ def invert(self) -> AbstractConstraint: class EquivalentConstraint(AbstractConstraint): """Represents multiple constraints that are either all true or all false.""" - constraints: Tuple[AbstractConstraint, ...] + constraints: tuple[AbstractConstraint, ...] def apply(self) -> Iterable["Constraint"]: for cons in self.constraints: @@ -565,7 +551,7 @@ def __str__(self) -> str: class AndConstraint(AbstractConstraint): """Represents the AND of two constraints.""" - constraints: Tuple[AbstractConstraint, ...] 
+ constraints: tuple[AbstractConstraint, ...] def apply(self) -> Iterable["Constraint"]: for cons in self.constraints: @@ -609,7 +595,7 @@ def __str__(self) -> str: class OrConstraint(AbstractConstraint): """Represents the OR of two constraints.""" - constraints: Tuple[AbstractConstraint, ...] + constraints: tuple[AbstractConstraint, ...] def apply(self) -> Iterable[Constraint]: grouped = [self._group_constraints(cons) for cons in self.constraints] @@ -639,7 +625,7 @@ def _constraint_from_list( def _group_constraints( self, abstract_constraint: AbstractConstraint - ) -> Dict[VarnameWithOrigin, List[Constraint]]: + ) -> dict[VarnameWithOrigin, list[Constraint]]: by_varname = defaultdict(list) for constraint in abstract_constraint.apply(): by_varname[constraint.varname].append(constraint) @@ -687,9 +673,9 @@ def __str__(self) -> str: class _ConstrainedValue(Value): """Helper class, only used within a FunctionScope.""" - definition_nodes: FrozenSet[Node] + definition_nodes: frozenset[Node] constraints: Sequence[Constraint] - resolution_cache: Dict[_LookupContext, Value] = field( + resolution_cache: dict[_LookupContext, Value] = field( default_factory=dict, init=False, compare=False, hash=False, repr=False ) @@ -706,12 +692,12 @@ class Scope: """ scope_type: ScopeType - variables: Dict[Varname, Value] = field(default_factory=dict) + variables: dict[Varname, Value] = field(default_factory=dict) parent_scope: Optional["Scope"] = None scope_node: Optional[Node] = None scope_object: Optional[object] = None simplification_limit: Optional[int] = None - declared_types: Dict[str, Tuple[Optional[Value], bool, AST]] = field( + declared_types: dict[str, tuple[Optional[Value], bool, AST]] = field( default_factory=dict ) @@ -731,7 +717,7 @@ def get( node: object, state: VisitorState, from_parent_scope: bool = False, - ) -> Tuple[Value, Optional["Scope"], VarnameOrigin]: + ) -> tuple[Value, Optional["Scope"], VarnameOrigin]: local_value, origin = self.get_local( varname, node, 
state, from_parent_scope=from_parent_scope ) @@ -758,7 +744,7 @@ def get_local( state: VisitorState, from_parent_scope: bool = False, fallback_value: Optional[Value] = None, - ) -> Tuple[Value, VarnameOrigin]: + ) -> tuple[Value, VarnameOrigin]: if varname in self.variables: return self.variables[varname], EMPTY_ORIGIN else: @@ -793,7 +779,7 @@ def set( self.variables[varname] = unite_values(existing, value) return EMPTY_ORIGIN - def items(self) -> Iterable[Tuple[Varname, Value]]: + def items(self) -> Iterable[tuple[Varname, Value]]: return self.variables.items() def all_variables(self) -> Iterable[Varname]: @@ -1017,13 +1003,13 @@ class FunctionScope(Scope): """ name_to_current_definition_nodes: SubScope - usage_to_definition_nodes: Dict[Tuple[Node, Varname], List[Node]] - definition_node_to_value: Dict[Node, Value] - name_to_all_definition_nodes: Dict[Varname, Set[Node]] - name_to_composites: Dict[Varname, Set[CompositeVariable]] - referencing_value_vars: Dict[Varname, Value] - accessed_from_special_nodes: Set[Varname] - current_loop_scopes: List[SubScope] + usage_to_definition_nodes: dict[tuple[Node, Varname], list[Node]] + definition_node_to_value: dict[Node, Value] + name_to_all_definition_nodes: dict[Varname, set[Node]] + name_to_composites: dict[Varname, set[CompositeVariable]] + referencing_value_vars: dict[Varname, Value] + accessed_from_special_nodes: set[Varname] + current_loop_scopes: list[SubScope] def __init__( self, @@ -1088,7 +1074,7 @@ def _add_single_constraint( self.name_to_current_definition_nodes[varname] = [node] self._add_composite(varname) - def _resolve_origin(self, definers: Iterable[Node]) -> FrozenSet[Node]: + def _resolve_origin(self, definers: Iterable[Node]) -> frozenset[Node]: seen = set() pending = set(definers) out = set() @@ -1143,7 +1129,7 @@ def get_local( state: VisitorState, from_parent_scope: bool = False, fallback_value: Optional[Value] = None, - ) -> Tuple[Value, VarnameOrigin]: + ) -> tuple[Value, VarnameOrigin]: 
self._add_composite(varname) ctx = _LookupContext(varname, fallback_value, node, state) if from_parent_scope: @@ -1194,7 +1180,7 @@ def get_origin( return EMPTY_ORIGIN return self._resolve_origin(definers) - def get_all_definition_nodes(self) -> Dict[Varname, Set[Node]]: + def get_all_definition_nodes(self) -> dict[Varname, builtins.set[Node]]: """Return a copy of name_to_all_definition_nodes.""" return { key: set(nodes) for key, nodes in self.name_to_all_definition_nodes.items() @@ -1260,7 +1246,7 @@ def subscope(self) -> Iterator[SubScope]: yield new_name_to_nodes @contextlib.contextmanager - def loop_scope(self) -> Iterator[List[SubScope]]: + def loop_scope(self) -> Iterator[list[SubScope]]: loop_scopes = [] with self.subscope() as main_scope: loop_scopes.append(main_scope) @@ -1369,7 +1355,7 @@ def _add_composite(self, varname: Varname) -> None: ) self.name_to_composites[composite].add(varname) - def items(self) -> Iterable[Tuple[Varname, Value]]: + def items(self) -> Iterable[tuple[Varname, Value]]: raise NotImplementedError def all_variables(self) -> Iterable[Varname]: @@ -1396,7 +1382,7 @@ class StackedScopes: def __init__( self, - module_vars: Dict[str, Value], + module_vars: dict[str, Value], module: Optional[ModuleType], *, simplification_limit: Optional[int] = None, @@ -1489,7 +1475,7 @@ def get(self, varname: Varname, node: Node, state: VisitorState) -> Value: def get_with_scope( self, varname: Varname, node: Node, state: VisitorState - ) -> Tuple[Value, Optional[Scope], VarnameOrigin]: + ) -> tuple[Value, Optional[Scope], VarnameOrigin]: """Like :meth:`get`, but also returns the scope object the name was found in. Returns a (:class:`pyanalyze.value.Value`, :class:`Scope`, origin) tuple. 
The :class:`Scope` @@ -1529,7 +1515,7 @@ def subscope(self) -> ContextManager[SubScope]: """Creates a new subscope (see the :class:`FunctionScope` docstring).""" return self.scopes[-1].subscope() - def loop_scope(self) -> ContextManager[List[SubScope]]: + def loop_scope(self) -> ContextManager[list[SubScope]]: """Creates a new loop scope (see the :class:`FunctionScope` docstring).""" return self.scopes[-1].loop_scope() @@ -1578,7 +1564,7 @@ def constrain_value( ) -def uniq_chain(iterables: Iterable[Iterable[T]]) -> List[T]: +def uniq_chain(iterables: Iterable[Iterable[T]]) -> list[T]: """Returns a flattened list, collapsing equal elements but preserving order.""" return list(OrderedDict.fromkeys(chain.from_iterable(iterables))) diff --git a/pyanalyze/stubs/_pyanalyze_tests-stubs/args.pyi b/pyanalyze/stubs/_pyanalyze_tests-stubs/args.pyi index 6105f832..c31fc34e 100644 --- a/pyanalyze/stubs/_pyanalyze_tests-stubs/args.pyi +++ b/pyanalyze/stubs/_pyanalyze_tests-stubs/args.pyi @@ -1,12 +1,10 @@ -from typing import Tuple - from typing_extensions import TypedDict, Unpack class TD(TypedDict): x: int y: str -def f(*args: Unpack[Tuple[int, str]]) -> None: ... +def f(*args: Unpack[tuple[int, str]]) -> None: ... def g(**kwargs: Unpack[TD]) -> None: ... def h(*args: int) -> None: ... def i(**kwargs: str) -> None: ... diff --git a/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi b/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi index 43c99a4c..be28b435 100644 --- a/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi +++ b/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi @@ -1,8 +1,8 @@ -from typing import AnyStr, ContextManager, Dict, Union +from typing import AnyStr, ContextManager from typing_extensions import TypeAlias class _ScandirIterator(ContextManager[_ScandirIterator[AnyStr]]): def close(self) -> None: ... 
-StrJson: TypeAlias = Union[str, Dict[str, StrJson]] +StrJson: TypeAlias = str | dict[str, StrJson] diff --git a/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi b/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi index 172e7b2f..a1453ec8 100644 --- a/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi +++ b/pyanalyze/stubs/pyanalyze-stubs/extensions.pyi @@ -3,10 +3,10 @@ # stubs import from non-stub files. from collections.abc import Sequence -from typing import Any, Callable, List, Optional +from typing import Any, Callable def reveal_type(value: object) -> None: ... -def get_overloads(fully_qualified_name: str) -> List[Callable[..., Any]]: ... +def get_overloads(fully_qualified_name: str) -> list[Callable[..., Any]]: ... def get_type_evaluation(fully_qualified_name: str) -> Sequence[Callable[..., Any]]: ... def overload(func: Callable[..., Any]) -> Callable[..., Any]: ... def evaluated(func: Callable[..., Any]) -> Callable[..., Any]: ... @@ -14,6 +14,6 @@ def is_provided(arg: Any) -> bool: ... def is_positional(arg: Any) -> bool: ... def is_keyword(arg: Any) -> bool: ... def is_of_type(arg: Any, type: Any, *, exclude_any: bool = ...) -> bool: ... -def show_error(message: str, *, argument: Optional[Any] = ...) -> bool: ... +def show_error(message: str, *, argument: Any | None = ...) -> bool: ... def deprecated(__message: str) -> Callable[..., Any]: ... def __getattr__(self, __arg: str) -> Any: ... 
diff --git a/pyanalyze/suggested_type.py b/pyanalyze/suggested_type.py index 823cc108..8cb751d7 100644 --- a/pyanalyze/suggested_type.py +++ b/pyanalyze/suggested_type.py @@ -9,7 +9,7 @@ from collections.abc import Iterator, Mapping, Sequence from dataclasses import dataclass, field from types import FunctionType -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from .error_code import ErrorCode from .node_visitor import ErrorContext, Failure @@ -47,7 +47,7 @@ class CallableData: ctx: ErrorContext sig: Signature scopes: StackedScopes - calls: List[CallArgs] = field(default_factory=list) + calls: list[CallArgs] = field(default_factory=list) def check(self) -> Iterator[Failure]: if not self.calls: @@ -86,8 +86,8 @@ def check(self) -> Iterator[Failure]: @dataclass class CallableTracker: - callable_to_data: Dict[object, CallableData] = field(default_factory=dict) - callable_to_calls: Dict[object, List[CallArgs]] = field( + callable_to_data: dict[object, CallableData] = field(default_factory=dict) + callable_to_calls: dict[object, list[CallArgs]] = field( default_factory=lambda: defaultdict(list) ) @@ -106,7 +106,7 @@ def record_call(self, callable: object, arguments: Mapping[str, Value]) -> None: """Record the actual arguments passed in in a call.""" self.callable_to_calls[callable].append(arguments) - def check(self) -> List[Failure]: + def check(self) -> list[Failure]: failures = [] for callable, calls in self.callable_to_calls.items(): if callable in self.callable_to_data: @@ -118,7 +118,7 @@ def check(self) -> List[Failure]: def display_suggested_type( value: Value, scopes: StackedScopes -) -> Tuple[str, Optional[Dict[str, Any]]]: +) -> tuple[str, Optional[dict[str, Any]]]: value = prepare_type(value) if isinstance(value, MultiValuedValue) and value.vals: cae = CanAssignError("Union", [CanAssignError(str(val)) for val in value.vals]) @@ -204,8 +204,8 @@ def prepare_type(value: Value) -> Value: vals = 
[prepare_type(subval) for subval in value.vals] # Throw out Anys vals = [val for val in vals if not isinstance(val, AnyValue)] - type_literals: List[Tuple[Value, type]] = [] - rest: List[Value] = [] + type_literals: list[tuple[Value, type]] = [] + rest: list[Value] = [] for subval in vals: if ( isinstance(subval, SubclassValue) diff --git a/pyanalyze/type_evaluation.py b/pyanalyze/type_evaluation.py index 4cea92d3..009adb7e 100644 --- a/pyanalyze/type_evaluation.py +++ b/pyanalyze/type_evaluation.py @@ -11,7 +11,7 @@ from collections.abc import Iterator, Mapping, Sequence from contextlib import contextmanager from dataclasses import dataclass, field -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union +from typing import Any, Callable, Optional, Union import qcore from typing_extensions import Literal @@ -101,7 +101,7 @@ def display(self, negated: bool = False) -> CanAssignError: @dataclass class PlatformCondition(Condition): actual: str - op: Type[ast.cmpop] + op: type[ast.cmpop] expected: object def display(self, negated: bool = False) -> CanAssignError: @@ -115,8 +115,8 @@ def display(self, negated: bool = False) -> CanAssignError: @dataclass class VersionCondition(Condition): - actual: Tuple[int, ...] - op: Type[ast.cmpop] + actual: tuple[int, ...] 
+ op: type[ast.cmpop] expected: object def _display_version(self, version: object) -> str: @@ -191,7 +191,7 @@ def subtract_unions(left: Value, right: Value) -> Value: return unite_values(*remaining) -_Operator = Union[Type[ast.cmpop], Literal["is of type", "is not of type"]] +_Operator = Union[type[ast.cmpop], Literal["is of type", "is not of type"]] @dataclass @@ -205,7 +205,7 @@ def _dummy_impl(left: object, right: object) -> object: raise NotImplementedError -_OP_TO_DATA: Dict[_Operator, _Comparator] = { +_OP_TO_DATA: dict[_Operator, _Comparator] = { ast.Is: _Comparator("is", ast.IsNot, operator.is_), ast.IsNot: _Comparator("is not", ast.Is, operator.is_not), ast.Eq: _Comparator("==", ast.NotEq, operator.eq), @@ -272,13 +272,13 @@ class Evaluator: node: Union[ast.FunctionDef, ast.AsyncFunctionDef] return_annotation: Value - def evaluate(self, ctx: EvalContext) -> Tuple[Value, Sequence[UserRaisedError]]: + def evaluate(self, ctx: EvalContext) -> tuple[Value, Sequence[UserRaisedError]]: visitor = EvaluateVisitor(self, ctx) result = visitor.run() errors = [e for e in visitor.errors if isinstance(e, UserRaisedError)] return result, errors - def validate(self, ctx: EvalContext) -> List[InvalidEvaluation]: + def validate(self, ctx: EvalContext) -> list[InvalidEvaluation]: visitor = EvaluateVisitor(self, ctx, validation_mode=True) visitor.run() return [ @@ -337,7 +337,7 @@ class ConditionEvaluator(ast.NodeVisitor): evaluator: Evaluator ctx: EvalContext validation_mode: bool = False - errors: List[EvaluateError] = field(default_factory=list, init=False) + errors: list[EvaluateError] = field(default_factory=list, init=False) def return_invalid(self, message: str, node: ast.AST) -> ConditionReturn: self.errors.append(InvalidEvaluation(message, node)) @@ -396,10 +396,7 @@ def visit_Call(self, node: ast.Call) -> ConditionReturn: ) else: # Before 3.9 keyword nodes don't have a lineno - if sys.version_info >= (3, 9): - error_node = keyword - else: - error_node = node + 
error_node = keyword return self.return_invalid( "Invalid keyword argument to is_of_type()", error_node ) @@ -609,8 +606,8 @@ def get_name(self, node: ast.Name) -> Optional[Value]: class EvaluateVisitor(ast.NodeVisitor): evaluator: Evaluator ctx: EvalContext - errors: List[EvaluateError] = field(default_factory=list) - active_conditions: List[Condition] = field(default_factory=list) + errors: list[EvaluateError] = field(default_factory=list) + active_conditions: list[Condition] = field(default_factory=list) validation_mode: bool = False def run(self) -> Value: @@ -727,10 +724,7 @@ def visit_show_error(self, call: ast.Call) -> EvalReturn: return None else: # Before 3.9 keyword nodes don't have a lineno - if sys.version_info >= (3, 9): - error_node = keyword - else: - error_node = call + error_node = keyword self.add_invalid("Invalid keyword argument to show_error()", error_node) return None self.errors.append( @@ -778,7 +772,7 @@ def generic_visit(self, node: ast.AST) -> Any: def decompose_union( expected_type: Value, parent_value: Value, ctx: CanAssignContext, exclude_any: bool -) -> Optional[Tuple[BoundsMap, Value]]: +) -> Optional[tuple[BoundsMap, Value]]: value = unannotate(parent_value) if isinstance(value, MultiValuedValue): bounds_maps = [] diff --git a/pyanalyze/type_object.py b/pyanalyze/type_object.py index a0359507..bf93772f 100644 --- a/pyanalyze/type_object.py +++ b/pyanalyze/type_object.py @@ -8,7 +8,7 @@ import inspect from collections.abc import Container, Sequence from dataclasses import dataclass, field -from typing import Callable, Dict, Set, Union, cast +from typing import Callable, Union, cast from unittest import mock from pyanalyze.signature import ( @@ -52,13 +52,13 @@ def get_mro(typ: Union[type, super]) -> Sequence[type]: @dataclass class TypeObject: typ: Union[type, super, str] - base_classes: Set[Union[type, str]] = field(default_factory=set) + base_classes: set[Union[type, str]] = field(default_factory=set) is_protocol: bool = False - 
protocol_members: Set[str] = field(default_factory=set) + protocol_members: set[str] = field(default_factory=set) is_thrift_enum: bool = field(init=False) is_universally_assignable: bool = field(init=False) - artificial_bases: Set[type] = field(default_factory=set, init=False) - _protocol_positive_cache: Dict[Value, BoundsMap] = field( + artificial_bases: set[type] = field(default_factory=set, init=False) + _protocol_positive_cache: dict[Value, BoundsMap] = field( default_factory=dict, repr=False ) diff --git a/pyanalyze/typeshed.py b/pyanalyze/typeshed.py index 540735df..b3e7332b 100644 --- a/pyanalyze/typeshed.py +++ b/pyanalyze/typeshed.py @@ -17,7 +17,7 @@ from dataclasses import dataclass, field, replace from enum import EnumMeta from types import GeneratorType, MethodDescriptorType, ModuleType -from typing import Any, Dict, Generic, List, Optional, Set, Tuple, TypeVar, Union +from typing import Any, Generic, Optional, TypeVar, Union import qcore import typeshed_client @@ -110,7 +110,7 @@ def get_attribute(self, root_value: Value, node: ast.Attribute) -> Value: class _DummyErrorContext: - all_failures: List[Failure] = [] + all_failures: list[Failure] = [] def show_error( self, @@ -120,7 +120,7 @@ def show_error( *, detail: Optional[str] = None, save: bool = True, - extra_metadata: Optional[Dict[str, Any]] = None, + extra_metadata: Optional[dict[str, Any]] = None, ) -> Optional[Failure]: return None @@ -152,13 +152,13 @@ class TypeshedFinder: ctx: CanAssignContext = field(repr=False) verbose: bool = True resolver: typeshed_client.Resolver = field(default_factory=typeshed_client.Resolver) - _assignment_cache: Dict[Tuple[str, ast.AST], Value] = field( + _assignment_cache: dict[tuple[str, ast.AST], Value] = field( default_factory=dict, repr=False, init=False ) - _attribute_cache: Dict[Tuple[str, str, bool], Value] = field( + _attribute_cache: dict[tuple[str, str, bool], Value] = field( default_factory=dict, repr=False, init=False ) - _active_infos: 
List[typeshed_client.resolver.ResolvedName] = field( + _active_infos: list[typeshed_client.resolver.ResolvedName] = field( default_factory=list, repr=False, init=False ) @@ -281,11 +281,11 @@ def get_argspec_for_fully_qualified_name( ) return sig - def get_bases(self, typ: type) -> Optional[List[Value]]: + def get_bases(self, typ: type) -> Optional[list[Value]]: """Return the base classes for this type, including generic bases.""" return self.get_bases_for_value(TypedValue(typ)) - def get_bases_for_value(self, val: Value) -> Optional[List[Value]]: + def get_bases_for_value(self, val: Value) -> Optional[list[Value]]: if isinstance(val, TypedValue): if isinstance(val.typ, type): typ = val.typ @@ -332,7 +332,7 @@ def is_protocol(self, typ: type) -> bool: for base in bases ) - def get_bases_recursively(self, typ: Union[type, str]) -> List[Value]: + def get_bases_recursively(self, typ: Union[type, str]) -> list[Value]: stack = [TypedValue(typ)] seen = set() bases = [] @@ -348,7 +348,7 @@ def get_bases_recursively(self, typ: Union[type, str]) -> List[Value]: bases += new_bases return bases - def get_bases_for_fq_name(self, fq_name: str) -> Optional[List[Value]]: + def get_bases_for_fq_name(self, fq_name: str) -> Optional[list[Value]]: if fq_name in ( "typing.Generic", "typing.Protocol", @@ -386,7 +386,7 @@ def get_attribute_for_fq_name( def get_attribute_recursively( self, fq_name: str, attr: str, *, on_class: bool - ) -> Tuple[Value, Union[type, str, None]]: + ) -> tuple[Value, Union[type, str, None]]: """Get an attribute from a fully qualified class. Returns a tuple (value, provider). 
@@ -426,7 +426,7 @@ def has_attribute(self, typ: Union[type, str], attr: str) -> bool: return True return False - def get_all_attributes(self, typ: Union[type, str]) -> Set[str]: + def get_all_attributes(self, typ: Union[type, str]) -> set[str]: if isinstance(typ, str): fq_name = typ else: @@ -553,7 +553,7 @@ def _get_value_from_child_info( def _get_child_info( self, info: typeshed_client.resolver.ResolvedName, attr: str, mod: str - ) -> Optional[Tuple[typeshed_client.resolver.ResolvedName, str]]: + ) -> Optional[tuple[typeshed_client.resolver.ResolvedName, str]]: if info is None: return None elif isinstance(info, typeshed_client.ImportedInfo): @@ -585,7 +585,7 @@ def _has_own_attribute(self, typ: Union[type, str], attr: str) -> bool: def _get_all_attributes_from_info( self, info: typeshed_client.resolver.ResolvedName, mod: str - ) -> Set[str]: + ) -> set[str]: if info is None: return set() elif isinstance(info, typeshed_client.ImportedInfo): @@ -632,7 +632,7 @@ def _has_attribute_from_info( def _get_bases_from_info( self, info: typeshed_client.resolver.ResolvedName, mod: str, fq_name: str - ) -> Optional[List[Value]]: + ) -> Optional[list[Value]]: if info is None: return None elif isinstance(info, typeshed_client.ImportedInfo): @@ -888,7 +888,7 @@ def _get_signature_from_func_def( if node.decorator_list: objclass = None args = node.args - arguments: List[SigParameter] = [] + arguments: list[SigParameter] = [] num_pos_only_args = len(args.posonlyargs) defaults = args.defaults num_pos_only_defaults = len(defaults) - len(args.args) diff --git a/pyanalyze/typevar.py b/pyanalyze/typevar.py index 2664cb09..b262ba4a 100644 --- a/pyanalyze/typevar.py +++ b/pyanalyze/typevar.py @@ -5,7 +5,7 @@ """ from collections.abc import Iterable, Sequence -from typing import Tuple, Union +from typing import Union import qcore @@ -36,7 +36,7 @@ def resolve_bounds_map( ctx: CanAssignContext, *, all_typevars: Iterable[TypeVarLike] = (), -) -> Tuple[TypeVarMap, Sequence[CanAssignError]]: 
+) -> tuple[TypeVarMap, Sequence[CanAssignError]]: tv_map = {tv: AnyValue(AnySource.generic_argument) for tv in all_typevars} errors = [] for tv, bounds in bounds_map.items(): diff --git a/pyanalyze/value.py b/pyanalyze/value.py index a9e64dcc..2bc7f56c 100644 --- a/pyanalyze/value.py +++ b/pyanalyze/value.py @@ -28,19 +28,7 @@ def function(x: int, y: list[int], z: Any): from dataclasses import InitVar, dataclass, field from itertools import chain from types import FunctionType, ModuleType -from typing import ( - Any, - Callable, - ContextManager, - Dict, - List, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, -) +from typing import Any, Callable, ContextManager, Optional, TypeVar, Union import qcore from typing_extensions import ParamSpec, Protocol, assert_never @@ -153,7 +141,7 @@ def can_overlap( # allow overlap with Never if other is NO_RETURN_VALUE: return None - errors: List[CanAssignError] = [] + errors: list[CanAssignError] = [] for val in other.vals: maybe_error = self.can_overlap(val, ctx, mode) if maybe_error is None: @@ -324,7 +312,7 @@ class CanAssignError: """ message: str = "" - children: List["CanAssignError"] = field(default_factory=list) + children: list["CanAssignError"] = field(default_factory=list) error_code: Optional[Error] = None def display(self, depth: int = 2) -> str: @@ -932,7 +920,7 @@ def can_overlap( def get_generic_args_for_type( self, typ: Union[type, super, str], ctx: CanAssignContext - ) -> Optional[List[Value]]: + ) -> Optional[list[Value]]: if isinstance(self, GenericValue): args = self.args else: @@ -1035,7 +1023,7 @@ class GenericValue(TypedValue): """ - args: Tuple[Value, ...] + args: tuple[Value, ...] """The generic arguments to the type.""" def __init__(self, typ: Union[type, str], args: Iterable[Value]) -> None: @@ -1178,11 +1166,11 @@ class SequenceValue(GenericValue): """ - members: Tuple[Tuple[bool, Value], ...] + members: tuple[tuple[bool, Value], ...] 
"""The elements of the sequence.""" def __init__( - self, typ: Union[type, str], members: Sequence[Tuple[bool, Value]] + self, typ: Union[type, str], members: Sequence[tuple[bool, Value]] ) -> None: if members: args = (unite_values(*[typ for _, typ in members]),) @@ -1209,7 +1197,7 @@ def make_known_value(self) -> Value: @classmethod def make_or_known( - cls, typ: type, members: Sequence[Tuple[bool, Value]] + cls, typ: type, members: Sequence[tuple[bool, Value]] ) -> Union[KnownValue, "SequenceValue"]: known_members = [] for is_many, member in members: @@ -1340,7 +1328,7 @@ class DictIncompleteValue(GenericValue): """ - kv_pairs: Tuple[KVPair, ...] + kv_pairs: tuple[KVPair, ...] """Sequence of :class:`KVPair` objects representing the keys and values of the dict.""" def __init__(self, typ: Union[type, str], kv_pairs: Sequence[KVPair]) -> None: @@ -1379,14 +1367,14 @@ def simplify(self) -> GenericValue: return GenericValue(self.typ, [key, value]) @property - def items(self) -> Sequence[Tuple[Value, Value]]: + def items(self) -> Sequence[tuple[Value, Value]]: """Sequence of pairs representing the keys and values of the dict.""" return [(pair.key, pair.value) for pair in self.kv_pairs] def get_value(self, key: Value, ctx: CanAssignContext) -> Value: """Return the :class:`Value` for a specific key.""" possible_values = [] - covered_keys: Set[Value] = set() + covered_keys: set[Value] = set() for pair in reversed(self.kv_pairs): if not pair.is_many: if isinstance(pair.key, AnnotatedValue): @@ -1426,7 +1414,7 @@ def __str__(self) -> str: class TypedDictValue(GenericValue): """Equivalent to ``typing.TypedDict``; a dictionary with a known set of string keys.""" - items: Dict[str, TypedDictEntry] + items: dict[str, TypedDictEntry] """The items of the ``TypedDict``. 
Required items are represented as (True, value) and optional ones as (False, value).""" extra_keys: Optional[Value] = None @@ -1436,7 +1424,7 @@ class TypedDictValue(GenericValue): def __init__( self, - items: Dict[str, TypedDictEntry], + items: dict[str, TypedDictEntry], extra_keys: Optional[Value] = None, extra_keys_readonly: bool = False, ) -> None: @@ -1697,7 +1685,7 @@ def substitute_typevars(self, typevars: TypeVarMap) -> "TypedDictValue": ) def __str__(self) -> str: - entries: List[Tuple[str, object]] = list(self.items.items()) + entries: list[tuple[str, object]] = list(self.items.items()) if self.extra_keys is not None and self.extra_keys is not NO_RETURN_VALUE: extra_typ = str(self.extra_keys) if self.extra_keys_readonly: @@ -1956,9 +1944,9 @@ class MultiValuedValue(Value): """Equivalent of ``typing.Union``. Represents the union of multiple values.""" raw_vals: InitVar[Iterable[Value]] - vals: Tuple[Value, ...] = field(init=False) + vals: tuple[Value, ...] = field(init=False) """The underlying values of the union.""" - _known_subvals: Optional[Tuple[Set[Tuple[object, type]], Sequence[Value]]] = field( + _known_subvals: Optional[tuple[set[tuple[object, type]], Sequence[Value]]] = field( init=False, repr=False, hash=False, compare=False ) @@ -1972,7 +1960,7 @@ def __post_init__(self, raw_vals: Iterable[Value]) -> None: def _get_known_subvals( self, - ) -> Optional[Tuple[Set[Tuple[object, type]], Sequence[Value]]]: + ) -> Optional[tuple[set[tuple[object, type]], Sequence[Value]]]: # Not worth it for small unions if len(self.vals) < 10: return None @@ -2049,7 +2037,7 @@ def can_overlap( ) -> Optional[CanAssignError]: if not self.vals: return None - errors: List[CanAssignError] = [] + errors: list[CanAssignError] = [] for val in self.vals: error = val.can_overlap(other, ctx, mode) if error is None: @@ -2082,9 +2070,9 @@ def __ne__(self, other: Value) -> bool: def __str__(self) -> str: if not self.vals: return "Never" - literals: List[KnownValue] = [] + 
literals: list[KnownValue] = [] has_none = False - others: List[Value] = [] + others: list[Value] = [] for val in self.vals: if val == KnownValue(None): has_none = True @@ -2585,7 +2573,7 @@ class AnnotatedValue(Value): value: Value """The underlying value.""" - metadata: Tuple[Union[Value, Extension], ...] + metadata: tuple[Union[Value, Extension], ...] """The extensions associated with this value.""" def __init__( @@ -2642,19 +2630,19 @@ def walk_values(self) -> Iterable[Value]: for val in self.metadata: yield from val.walk_values() - def get_metadata_of_type(self, typ: Type[T]) -> Iterable[T]: + def get_metadata_of_type(self, typ: type[T]) -> Iterable[T]: """Return any metadata of the given type.""" for data in self.metadata: if isinstance(data, typ): yield data - def get_custom_check_of_type(self, typ: Type[T]) -> Iterable[T]: + def get_custom_check_of_type(self, typ: type[T]) -> Iterable[T]: """Return any CustomChecks of the given type in the metadata.""" for custom_check in self.get_metadata_of_type(CustomCheckExtension): if isinstance(custom_check.custom_check, typ): yield custom_check.custom_check - def has_metadata_of_type(self, typ: Type[Extension]) -> bool: + def has_metadata_of_type(self, typ: type[Extension]) -> bool: """Return whether there is metadat of the given type.""" return any(isinstance(data, typ) for data in self.metadata) @@ -2671,7 +2659,7 @@ class UnpackedValue(Value): value: Value - def get_elements(self) -> Optional[Sequence[Tuple[bool, Value]]]: + def get_elements(self) -> Optional[Sequence[tuple[bool, Value]]]: if isinstance(self.value, SequenceValue) and self.value.typ is tuple: return self.value.members elif isinstance(self.value, GenericValue) and self.value.typ is tuple: @@ -2702,7 +2690,7 @@ def __init__(self, varnames: Iterable[str]) -> None: super().__init__(AnySource.variable_name) object.__setattr__(self, "varnames", tuple(varnames)) - varnames: Tuple[str, ...] + varnames: tuple[str, ...] 
def can_assign(self, other: Value, ctx: CanAssignContext) -> CanAssign: if not isinstance(other, VariableNameValue): @@ -2717,11 +2705,11 @@ def can_overlap( return None def __str__(self) -> str: - return "" % ", ".join(self.varnames) + return "".format(", ".join(self.varnames)) @classmethod def from_varname( - cls, varname: str, varname_map: Dict[str, "VariableNameValue"] + cls, varname: str, varname_map: dict[str, "VariableNameValue"] ) -> Optional["VariableNameValue"]: """Returns the VariableNameValue corresponding to a variable name. @@ -2803,7 +2791,7 @@ def unify_bounds_maps(bounds_maps: Sequence[BoundsMap]) -> BoundsMap: def intersect_bounds_maps(bounds_maps: Sequence[BoundsMap]) -> BoundsMap: - intermediate: Dict[TypeVarLike, Set[Tuple[Bound, ...]]] = {} + intermediate: dict[TypeVarLike, set[tuple[Bound, ...]]] = {} for bounds_map in bounds_maps: for tv, bounds in bounds_map.items(): intermediate.setdefault(tv, set()).add(tuple(bounds)) @@ -2844,8 +2832,8 @@ def annotate_value(origin: Value, metadata: Sequence[Union[Value, Extension]]) - def unannotate_value( - origin: Value, extension: Type[ExtensionT] -) -> Tuple[Value, Sequence[ExtensionT]]: + origin: Value, extension: type[ExtensionT] +) -> tuple[Value, Sequence[ExtensionT]]: if not isinstance(origin, AnnotatedValue): return origin, [] matches = [ @@ -3231,7 +3219,7 @@ def is_async_iterable( def _create_unpacked_list( iterable_type: Value, target_length: int, post_starred_length: Optional[int] -) -> List[Value]: +) -> list[Value]: if post_starred_length is not None: return [ *([iterable_type] * target_length), @@ -3373,7 +3361,7 @@ def stringify_object(obj: Any) -> str: def can_assign_and_used_any( param_typ: Value, var_value: Value, ctx: CanAssignContext -) -> Tuple[CanAssign, bool]: +) -> tuple[CanAssign, bool]: with ctx.reset_any_used(): tv_map = param_typ.can_assign(var_value, ctx) used_any = ctx.has_used_any_match() diff --git a/pyanalyze/yield_checker.py b/pyanalyze/yield_checker.py index 
66687803..66489a40 100644 --- a/pyanalyze/yield_checker.py +++ b/pyanalyze/yield_checker.py @@ -15,7 +15,7 @@ import logging from collections.abc import Iterator, Sequence from dataclasses import dataclass, field -from typing import Any, Callable, ContextManager, Dict, List, Optional, Set, Tuple +from typing import Any, Callable, ContextManager, Optional import asynq import qcore @@ -38,8 +38,8 @@ class YieldInfo: yield_node: ast.Yield statement_node: ast.stmt - lines: List[str] - line_range: List[int] = field(init=False) + lines: list[str] + line_range: list[int] = field(init=False) def __post_init__(self) -> None: self.line_range = get_line_range_for_node(self.statement_node, self.lines) @@ -52,7 +52,7 @@ def is_assign_or_expr(self) -> bool: def get_indentation(self) -> int: return get_indentation(self.lines[self.statement_node.lineno - 1]) - def target_and_value(self) -> Tuple[List[ast.expr], List[ast.expr]]: + def target_and_value(self) -> tuple[list[ast.expr], list[ast.expr]]: """Returns a pair of a list of target nodes and a list of value nodes.""" assert self.yield_node.value is not None if isinstance(self.statement_node, ast.Assign): @@ -158,15 +158,15 @@ def _ensure_unique(self, varname: str) -> str: @dataclass class YieldChecker: visitor: "pyanalyze.name_check_visitor.NameCheckVisitor" - variables_from_yield_result: Dict[str, bool] = field(default_factory=dict) + variables_from_yield_result: dict[str, bool] = field(default_factory=dict) in_yield_result_assignment: bool = False in_non_async_yield: bool = False last_yield_in_aug_assign: bool = False previous_yield: Optional[ast.Yield] = None statement_for_previous_yield: Optional[ast.stmt] = None - used_varnames: Set[str] = field(default_factory=set) + used_varnames: set[str] = field(default_factory=set) current_function_node: Optional[FunctionNode] = None - alerted_nodes: Set[FunctionNode] = field(default_factory=set) + alerted_nodes: set[FunctionNode] = field(default_factory=set) def 
set_function_node(self, node: FunctionNode) -> ContextManager[None]: return qcore.override(self, "current_function_node", node) @@ -290,7 +290,7 @@ def _check_for_duplicate_yields( return duplicate_indices = {} # index to first index - seen: Dict[str, int] = {} # ast.dump result to index + seen: dict[str, int] = {} # ast.dump result to index for i, member in enumerate(node.value.elts): # identical AST nodes don't compare equally, so just stringify them for comparison code = ast.dump(member) @@ -382,7 +382,7 @@ def show_unnecessary_yield_error( node, message, ErrorCode.unnecessary_yield, replacement=replacement ) - def _lines_of_node(self, yield_node: ast.Yield) -> List[int]: + def _lines_of_node(self, yield_node: ast.Yield) -> list[int]: """Returns the lines that the given yield node occupies.""" first_lineno = yield_node.lineno lines = self.visitor._lines() @@ -523,7 +523,7 @@ def _create_replacement_for_yield_nodes( def _move_out_var_from_yield( self, yield_info: YieldInfo, indentation: int - ) -> Tuple[List[str], Optional[Replacement]]: + ) -> tuple[list[str], Optional[Replacement]]: """Helper for splitting up a yield node and moving it to an earlier place. 
For example, it will help turn: @@ -611,7 +611,7 @@ def is_async_fn(self, obj: Any) -> bool: def _camel_case_to_snake_case(s: str) -> str: """Converts a CamelCase string to snake_case.""" - out: List[str] = [] + out: list[str] = [] last_was_uppercase = False for c in s: if c.isupper(): From 9d8fda0e0b62b1e6a5dd66731b3df5aa386a6f21 Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Thu, 10 Oct 2024 17:42:03 -0700 Subject: [PATCH 3/4] Manual fixes for the rest --- pyanalyze/annotations.py | 14 +++----------- pyanalyze/checker.py | 8 ++++---- pyanalyze/format_strings.py | 10 ++++++++-- pyanalyze/name_check_visitor.py | 18 +++++------------- pyanalyze/node_visitor.py | 3 ++- pyanalyze/stacked_scopes.py | 9 +++++---- .../_pyanalyze_tests-stubs/contextmanager.pyi | 4 ++-- .../stubs/_pyanalyze_tests-stubs/recursion.pyi | 5 +++-- pyanalyze/value.py | 9 +++++---- pyanalyze/yield_checker.py | 9 ++++++--- 10 files changed, 43 insertions(+), 46 deletions(-) diff --git a/pyanalyze/annotations.py b/pyanalyze/annotations.py index 54455fd3..ea8c3101 100644 --- a/pyanalyze/annotations.py +++ b/pyanalyze/annotations.py @@ -29,17 +29,9 @@ import contextlib import typing from collections.abc import Callable, Container, Generator, Hashable, Mapping, Sequence +from contextlib import AbstractContextManager from dataclasses import InitVar, dataclass, field -from typing import ( - TYPE_CHECKING, - Any, - ContextManager, - NewType, - Optional, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, NewType, Optional, TypeVar, Union, cast import qcore import typing_extensions @@ -139,7 +131,7 @@ class Context: """While this is True, no errors are shown for undefined names.""" _being_evaluated: set[int] = field(default_factory=set, init=False) - def suppress_undefined_names(self) -> ContextManager[None]: + def suppress_undefined_names(self) -> AbstractContextManager[None]: """Temporarily suppress errors about undefined names.""" return qcore.override(self, 
"should_suppress_undefined_names", True) diff --git a/pyanalyze/checker.py b/pyanalyze/checker.py index 837ae612..d7891be3 100644 --- a/pyanalyze/checker.py +++ b/pyanalyze/checker.py @@ -9,9 +9,9 @@ import sys import types from collections.abc import Iterable, Iterator, Sequence -from contextlib import contextmanager +from contextlib import AbstractContextManager, contextmanager from dataclasses import InitVar, dataclass, field -from typing import Callable, ContextManager, Optional, Union +from typing import Callable, Optional, Union import qcore @@ -263,12 +263,12 @@ def record_any_used(self) -> None: """Record that Any was used to secure a match.""" self._has_used_any_match = True - def reset_any_used(self) -> ContextManager[None]: + def reset_any_used(self) -> AbstractContextManager[None]: """Context that resets the value used by :meth:`has_used_any_match` and :meth:`record_any_match`.""" return qcore.override(self, "_has_used_any_match", False) - def set_exclude_any(self) -> ContextManager[None]: + def set_exclude_any(self) -> AbstractContextManager[None]: """Within this context, `Any` is compatible only with itself.""" return qcore.override(self, "_should_exclude_any", True) diff --git a/pyanalyze/format_strings.py b/pyanalyze/format_strings.py index 4834123b..8f9d5a73 100644 --- a/pyanalyze/format_strings.py +++ b/pyanalyze/format_strings.py @@ -372,9 +372,15 @@ def accept_tuple_args_no_mvv( num_args = len(all_args) num_specifiers = len(specifiers) if num_args < num_specifiers: - yield f"too few arguments to format string: got {num_args} but expected {num_specifiers}" + yield ( + f"too few arguments to format string: " + f"got {num_args} but expected {num_specifiers}" + ) elif num_args > num_specifiers: - yield f"too many arguments to format string: got {num_args} but expected {num_specifiers}" + yield ( + f"too many arguments to format string: " + f"got {num_args} but expected {num_specifiers}" + ) else: for arg, specifier in zip(all_args, specifiers): 
yield from specifier.accept(arg, ctx) diff --git a/pyanalyze/name_check_visitor.py b/pyanalyze/name_check_visitor.py index 2cf5ec51..448610df 100644 --- a/pyanalyze/name_check_visitor.py +++ b/pyanalyze/name_check_visitor.py @@ -28,19 +28,11 @@ from abc import abstractmethod from argparse import ArgumentParser from collections.abc import Container, Generator, Iterable, Iterator, Mapping, Sequence +from contextlib import AbstractContextManager from dataclasses import dataclass from itertools import chain from pathlib import Path -from typing import ( - Annotated, - Any, - Callable, - ClassVar, - ContextManager, - Optional, - TypeVar, - Union, -) +from typing import Annotated, Any, Callable, ClassVar, Optional, TypeVar, Union from unittest.mock import ANY import asynq @@ -1236,7 +1228,7 @@ def can_assume_compatibility(self, left: TypeObject, right: TypeObject) -> bool: def assume_compatibility( self, left: TypeObject, right: TypeObject - ) -> ContextManager[None]: + ) -> AbstractContextManager[None]: return self.checker.assume_compatibility(left, right) def has_used_any_match(self) -> bool: @@ -1247,12 +1239,12 @@ def record_any_used(self) -> None: """Record that Any was used to secure a match.""" self._has_used_any_match = True - def reset_any_used(self) -> ContextManager[None]: + def reset_any_used(self) -> AbstractContextManager[None]: """Context that resets the value used by :meth:`has_used_any_match` and :meth:`record_any_match`.""" return qcore.override(self, "_has_used_any_match", False) - def set_exclude_any(self) -> ContextManager[None]: + def set_exclude_any(self) -> AbstractContextManager[None]: """Within this context, `Any` is compatible only with itself.""" return qcore.override(self, "_should_exclude_any", True) diff --git a/pyanalyze/node_visitor.py b/pyanalyze/node_visitor.py index ee5a49d7..8b3b1d49 100644 --- a/pyanalyze/node_visitor.py +++ b/pyanalyze/node_visitor.py @@ -821,7 +821,8 @@ def _get_argument_parser(cls) -> argparse.ArgumentParser: for 
code in cls.error_code_enum: enabled_string = "on" if cls.is_enabled_by_default(code) else "off" code_descriptions.append( - f" - {code.name}: {cls.get_description_for_error_code(code)} (default: {enabled_string})" + f" - {code.name}: {cls.get_description_for_error_code(code)}" + f" (default: {enabled_string})" ) epilog = "Supported checks:\n" + "\n".join(code_descriptions) diff --git a/pyanalyze/stacked_scopes.py b/pyanalyze/stacked_scopes.py index bcdd99eb..a9f7e704 100644 --- a/pyanalyze/stacked_scopes.py +++ b/pyanalyze/stacked_scopes.py @@ -26,10 +26,11 @@ from ast import AST from collections import OrderedDict, defaultdict from collections.abc import Iterable, Iterator, Sequence +from contextlib import AbstractContextManager from dataclasses import dataclass, field, replace from itertools import chain from types import ModuleType -from typing import Any, Callable, ContextManager, NamedTuple, Optional, TypeVar, Union +from typing import Any, Callable, NamedTuple, Optional, TypeVar, Union import qcore @@ -1508,14 +1509,14 @@ def set( """ self.scopes[-1].set(varname, value, node, state) - def suppressing_subscope(self) -> ContextManager[SubScope]: + def suppressing_subscope(self) -> AbstractContextManager[SubScope]: return self.scopes[-1].suppressing_subscope() - def subscope(self) -> ContextManager[SubScope]: + def subscope(self) -> AbstractContextManager[SubScope]: """Creates a new subscope (see the :class:`FunctionScope` docstring).""" return self.scopes[-1].subscope() - def loop_scope(self) -> ContextManager[list[SubScope]]: + def loop_scope(self) -> AbstractContextManager[list[SubScope]]: """Creates a new loop scope (see the :class:`FunctionScope` docstring).""" return self.scopes[-1].loop_scope() diff --git a/pyanalyze/stubs/_pyanalyze_tests-stubs/contextmanager.pyi b/pyanalyze/stubs/_pyanalyze_tests-stubs/contextmanager.pyi index 5550b0b0..269ba9d0 100644 --- a/pyanalyze/stubs/_pyanalyze_tests-stubs/contextmanager.pyi +++ 
b/pyanalyze/stubs/_pyanalyze_tests-stubs/contextmanager.pyi @@ -1,3 +1,3 @@ -from typing import ContextManager +from contextlib import AbstractContextManager -def cm() -> ContextManager[int]: ... +def cm() -> AbstractContextManager[int]: ... diff --git a/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi b/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi index be28b435..38b74fb7 100644 --- a/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi +++ b/pyanalyze/stubs/_pyanalyze_tests-stubs/recursion.pyi @@ -1,8 +1,9 @@ -from typing import AnyStr, ContextManager +from contextlib import AbstractContextManager +from typing import AnyStr from typing_extensions import TypeAlias -class _ScandirIterator(ContextManager[_ScandirIterator[AnyStr]]): +class _ScandirIterator(AbstractContextManager[_ScandirIterator[AnyStr]]): def close(self) -> None: ... StrJson: TypeAlias = str | dict[str, StrJson] diff --git a/pyanalyze/value.py b/pyanalyze/value.py index 2bc7f56c..58a76902 100644 --- a/pyanalyze/value.py +++ b/pyanalyze/value.py @@ -25,10 +25,11 @@ def function(x: int, y: list[int], z: Any): import textwrap from collections import deque from collections.abc import Iterable, Iterator, Mapping, Sequence +from contextlib import AbstractContextManager from dataclasses import InitVar, dataclass, field from itertools import chain from types import FunctionType, ModuleType -from typing import Any, Callable, ContextManager, Optional, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union import qcore from typing_extensions import ParamSpec, Protocol, assert_never @@ -274,7 +275,7 @@ def assume_compatibility( self, left: "pyanalyze.type_object.TypeObject", right: "pyanalyze.type_object.TypeObject", - ) -> ContextManager[None]: + ) -> AbstractContextManager[None]: return qcore.empty_context def has_used_any_match(self) -> bool: @@ -284,12 +285,12 @@ def has_used_any_match(self) -> bool: def record_any_used(self) -> None: """Record that Any was used to secure a 
match.""" - def reset_any_used(self) -> ContextManager[None]: + def reset_any_used(self) -> AbstractContextManager[None]: """Context that resets the value used by :meth:`has_used_any_match` and :meth:`record_any_match`.""" return qcore.empty_context - def set_exclude_any(self) -> ContextManager[None]: + def set_exclude_any(self) -> AbstractContextManager[None]: """Within this context, `Any` is compatible only with itself.""" return qcore.empty_context diff --git a/pyanalyze/yield_checker.py b/pyanalyze/yield_checker.py index 66489a40..76ee9b0e 100644 --- a/pyanalyze/yield_checker.py +++ b/pyanalyze/yield_checker.py @@ -14,8 +14,9 @@ import itertools import logging from collections.abc import Iterator, Sequence +from contextlib import AbstractContextManager from dataclasses import dataclass, field -from typing import Any, Callable, ContextManager, Optional +from typing import Any, Callable, Optional import asynq import qcore @@ -168,7 +169,7 @@ class YieldChecker: current_function_node: Optional[FunctionNode] = None alerted_nodes: set[FunctionNode] = field(default_factory=set) - def set_function_node(self, node: FunctionNode) -> ContextManager[None]: + def set_function_node(self, node: FunctionNode) -> AbstractContextManager[None]: return qcore.override(self, "current_function_node", node) @contextlib.contextmanager @@ -197,7 +198,9 @@ def check_yield( # Unnecessary yield checking - def check_yield_result_assignment(self, in_yield: bool) -> ContextManager[None]: + def check_yield_result_assignment( + self, in_yield: bool + ) -> AbstractContextManager[None]: return qcore.override(self, "in_yield_result_assignment", in_yield) def record_assignment(self, name: str) -> None: From a573d489f8c3eea23081222da5684fd0ce318c1b Mon Sep 17 00:00:00 2001 From: Jelle Zijlstra Date: Thu, 10 Oct 2024 17:57:19 -0700 Subject: [PATCH 4/4] fix --- pyanalyze/annotations.py | 2 +- pyproject.toml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/pyanalyze/annotations.py 
b/pyanalyze/annotations.py index ea8c3101..f71ab76d 100644 --- a/pyanalyze/annotations.py +++ b/pyanalyze/annotations.py @@ -784,7 +784,7 @@ def _type_from_subscripted_value( ctx.show_error("Optional[] takes only one argument") return AnyValue(AnySource.error) return unite_values(KnownValue(None), _type_from_value(members[0], ctx)) - elif root is type or root is type: + elif root is typing.Type or root is type: if len(members) != 1: ctx.show_error("Type[] takes only one argument") return AnyValue(AnySource.error) diff --git a/pyproject.toml b/pyproject.toml index df4ea726..583b6f19 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,3 +76,6 @@ ignore = [ "pyanalyze/test_*.py" = [ "UP", # Want to test old-style code ] +"pyanalyze/annotations.py" = [ + "UP006", # Need to refer to typing.Type +]